|
38 | 38 | # Verify that the CSV columns and the YML keys match |
39 | 39 | csv_valid = nu.csv_validator(filename = filename, |
40 | 40 | yml_data = yml_data) |
41 | | - try: |
42 | | - logfile.append('=== Inserting new Site ===') |
43 | | - uploader['siteid'] = nu.insert_site(cur = cur, |
44 | | - yml_dict = yml_dict, |
45 | | - csv_template = csv_template) |
46 | | - logfile.append(f"siteid: {uploader['siteid']}") |
47 | | - |
48 | | - # logfile.append('=== Inserting Site Geopol ===') |
49 | | - # uploader['geopolid'] = nu.insert_geopol(cur = cur, |
50 | | - # yml_dict = yml_dict, |
51 | | - # csv_template = csv_template, |
52 | | - # uploader = uploader) |
53 | | - # logfile.append(f"Geopolitical Unit: {uploader['geopolid']}") |
54 | | - |
55 | | - logfile.append('=== Inserting Collection Units ===') |
56 | | - uploader['collunitid'] = nu.insert_collunit(cur = cur, |
| 41 | + #try: |
| 42 | + logfile.append('=== Inserting new Site ===') |
| 43 | + uploader['siteid'] = nu.insert_site(cur = cur, |
| 44 | + yml_dict = yml_dict, |
| 45 | + csv_template = csv_template) |
| 46 | + logfile.append(f"siteid: {uploader['siteid']}") |
| 47 | + |
| 48 | + # logfile.append('=== Inserting Site Geopol ===') |
| 49 | + # uploader['geopolid'] = nu.insert_geopol(cur = cur, |
| 50 | + # yml_dict = yml_dict, |
| 51 | + # csv_template = csv_template, |
| 52 | + # uploader = uploader) |
| 53 | + # logfile.append(f"Geopolitical Unit: {uploader['geopolid']}") |
| 54 | + |
| 55 | + logfile.append('=== Inserting Collection Units ===') |
| 56 | + uploader['collunitid'] = nu.insert_collunit(cur = cur, |
| 57 | + yml_dict = yml_dict, |
| 58 | + csv_template = csv_template, |
| 59 | + uploader = uploader) |
| 60 | + logfile.append(f"collunitid: {uploader['collunitid']}") |
| 61 | + |
| 62 | + logfile.append('=== Inserting Analysis Units ===') |
| 63 | + uploader['anunits'] = nu.insert_analysisunit(cur = cur, |
57 | 64 | yml_dict = yml_dict, |
58 | 65 | csv_template = csv_template, |
59 | 66 | uploader = uploader) |
60 | | - logfile.append(f"collunitid: {uploader['collunitid']}") |
| 67 | + logfile.append(f"anunits: {uploader['anunits']}") |
61 | 68 |
62 | | - logfile.append('=== Inserting Analysis Units ===') |
63 | | - uploader['anunits'] = nu.insert_analysisunit(cur = cur, |
64 | | - yml_dict = yml_dict, |
65 | | - csv_template = csv_template, |
66 | | - uploader = uploader) |
67 | | - logfile.append(f"anunits: {uploader['anunits']}") |
| 69 | + logfile.append('=== Inserting Chronology ===') |
| 70 | + uploader['chronology'] = nu.insert_chronology(cur = cur, |
| 71 | + yml_dict = yml_dict, |
| 72 | + csv_template = csv_template, |
| 73 | + uploader = uploader) |
| 74 | + logfile.append(f"chronology: {uploader['chronology']}") |
68 | 75 |
69 | | - logfile.append('=== Inserting Chronology ===') |
70 | | - uploader['chronology'] = nu.insert_chronology(cur = cur, |
| 76 | + logfile.append('=== Inserting Chroncontrol ===') |
| 77 | + uploader['chroncontrol'] = nu.insert_chron_control(cur = cur, |
71 | 78 | yml_dict = yml_dict, |
72 | 79 | csv_template = csv_template, |
73 | 80 | uploader = uploader) |
74 | | - logfile.append(f"chronology: {uploader['chronology']}") |
| 81 | + logfile.append(f"chroncontrol: {uploader['chroncontrol']}") |
75 | 82 |
76 | | - logfile.append('=== Inserting Chroncontrol ===') |
77 | | - uploader['chroncontrol'] = nu.insert_chron_control(cur = cur, |
78 | | - yml_dict = yml_dict, |
79 | | - csv_template = csv_template, |
80 | | - uploader = uploader) |
81 | | - logfile.append(f"chroncontrol: {uploader['chroncontrol']}") |
| 83 | + logfile.append('=== Inserting Dataset ===') |
| 84 | + uploader['datasetid'] = nu.insert_dataset(cur = cur, |
| 85 | + yml_dict = yml_dict, |
| 86 | + csv_template = csv_template, |
| 87 | + uploader = uploader) |
| 88 | + logfile.append(f"datasetid: {uploader['datasetid']}") |
82 | 89 |
83 | | - logfile.append('=== Inserting Dataset ===') |
84 | | - uploader['datasetid'] = nu.insert_dataset(cur = cur, |
| 90 | + logfile.append('=== Inserting Dataset PI ===') |
| 91 | + uploader['datasetpi'] = nu.insert_dataset_pi(cur = cur, |
85 | 92 | yml_dict = yml_dict, |
86 | 93 | csv_template = csv_template, |
87 | 94 | uploader = uploader) |
88 | | - logfile.append(f"datasetid: {uploader['datasetid']}") |
| 95 | + logfile.append(f"datasetPI: {uploader['datasetpi']}") |
89 | 96 |
90 | | - logfile.append('=== Inserting Dataset PI ===') |
91 | | - uploader['datasetpi'] = nu.insert_dataset_pi(cur = cur, |
| 97 | + logfile.append('=== Inserting Data Processor ===') |
| 98 | + uploader['processor'] = nu.insert_data_processor(cur = cur, |
92 | 99 | yml_dict = yml_dict, |
93 | 100 | csv_template = csv_template, |
94 | 101 | uploader = uploader) |
95 | | - logfile.append(f"datasetPI: {uploader['datasetpi']}") |
96 | | - |
97 | | - logfile.append('=== Inserting Data Processor ===') |
98 | | - uploader['processor'] = nu.insert_data_processor(cur = cur, |
99 | | - yml_dict = yml_dict, |
100 | | - csv_template = csv_template, |
101 | | - uploader = uploader) |
102 | | - logfile.append(f"dataset Processor: {uploader['processor']}") |
103 | | - |
104 | | - # Not sure where to get this information from |
105 | | - # logfile.append('=== Inserting Repository ===') |
106 | | - # uploader['repository'] = nu.insert_dataset_repository(cur = cur, |
107 | | - # yml_dict = yml_dict, |
108 | | - # csv_template = csv_template, |
109 | | - # uploader = uploader) |
110 | | - # logfile.append(f"dataset Processor: {uploader['repository']}") |
111 | | - |
112 | | - logfile.append('=== Inserting Dataset Database ===') |
113 | | - uploader['database'] = nu.insert_dataset_database(cur = cur, |
114 | | - yml_dict = yml_dict, |
115 | | - uploader = uploader) |
116 | | - logfile.append(f"Dataset Database: {uploader['database']}") |
117 | | - |
118 | | - logfile.append('=== Inserting Samples ===') |
119 | | - uploader['samples'] = nu.insert_sample(cur, |
120 | | - yml_dict = yml_dict, |
121 | | - csv_template = csv_template, |
122 | | - uploader = uploader) |
123 | | - logfile.append(f"Dataset Samples: {uploader['samples']}") |
| 102 | + logfile.append(f"dataset Processor: {uploader['processor']}") |
| 103 | + |
| 104 | + # Not sure where to get this information from |
| 105 | + # logfile.append('=== Inserting Repository ===') |
| 106 | + # uploader['repository'] = nu.insert_dataset_repository(cur = cur, |
| 107 | + # yml_dict = yml_dict, |
| 108 | + # csv_template = csv_template, |
| 109 | + # uploader = uploader) |
| 110 | + # logfile.append(f"dataset Processor: {uploader['repository']}") |
| 111 | + |
| 112 | + logfile.append('=== Inserting Dataset Database ===') |
| 113 | + uploader['database'] = nu.insert_dataset_database(cur = cur, |
| 114 | + yml_dict = yml_dict, |
| 115 | + uploader = uploader) |
| 116 | + logfile.append(f"Dataset Database: {uploader['database']}") |
124 | 117 |
125 | | - logfile.append('=== Inserting Sample Analyst ===') |
126 | | - uploader['sampleAnalyst'] = nu.insert_sample_analyst(cur, |
127 | | - yml_dict = yml_dict, |
128 | | - csv_template = csv_template, |
129 | | - uploader = uploader) |
130 | | - logfile.append(f"Sample Analyst: {uploader['sampleAnalyst']}") |
| 118 | + logfile.append('=== Inserting Samples ===') |
| 119 | + uploader['samples'] = nu.insert_sample(cur, |
| 120 | + yml_dict = yml_dict, |
| 121 | + csv_template = csv_template, |
| 122 | + uploader = uploader) |
| 123 | + logfile.append(f"Dataset Samples: {uploader['samples']}") |
| 124 | + print(uploader['samples']) |
131 | 125 |
132 | | - logfile.append('=== Inserting Sample Age ===') |
133 | | - uploader['sampleAge'] = nu.insert_sample_age(cur, |
134 | | - yml_dict = yml_dict, |
135 | | - csv_template = csv_template, |
136 | | - uploader = uploader) |
137 | | - logfile.append(f"Sample Age: {uploader['sampleAge']}") |
| 126 | + logfile.append('=== Inserting Sample Analyst ===') |
| 127 | + uploader['sampleAnalyst'] = nu.insert_sample_analyst(cur, |
| 128 | + yml_dict = yml_dict, |
| 129 | + csv_template = csv_template, |
| 130 | + uploader = uploader) |
| 131 | + logfile.append(f"Sample Analyst: {uploader['sampleAnalyst']}") |
138 | 132 |
139 | | - logfile.append('=== Inserting Data ===') |
140 | | - uploader['data'] = nu.insert_data(cur, |
| 133 | + logfile.append('=== Inserting Sample Age ===') |
| 134 | + uploader['sampleAge'] = nu.insert_sample_age(cur, |
141 | 135 | yml_dict = yml_dict, |
142 | 136 | csv_template = csv_template, |
143 | 137 | uploader = uploader) |
144 | | - logfile.append(f"Data: {uploader['data']}") |
145 | | - |
146 | | - with open(filename + '.upload.log', 'w', encoding = "utf-8") as writer: |
147 | | - for i in logfile: |
148 | | - writer.write(i) |
149 | | - writer.write('\n') |
150 | | - |
151 | | - except Exception as e: |
152 | | - logfile.append(f"File: {filename} could not be uploaded. Review logs.") |
153 | | - logfile.append(f"An exception occurred: {str(e)}") |
154 | | - with open(filename + '.upload.log', 'w', encoding = "utf-8") as writer: |
155 | | - for i in logfile: |
156 | | - writer.write(i) |
157 | | - writer.write('\n') |
| 138 | + logfile.append(f"Sample Age: {uploader['sampleAge']}") |
| 139 | + |
| 140 | + logfile.append('=== Inserting Data ===') |
| 141 | + uploader['data'] = nu.insert_data(cur, |
| 142 | + yml_dict = yml_dict, |
| 143 | + csv_template = csv_template, |
| 144 | + uploader = uploader) |
| 145 | + logfile.append(f"Data: {uploader['data']}") |
| 146 | + |
| 147 | + with open(filename + '.upload.log', 'w', encoding = "utf-8") as writer: |
| 148 | + for i in logfile: |
| 149 | + writer.write(i) |
| 150 | + writer.write('\n') |
| 151 | + |
| 152 | + #except Exception as e: |
| 153 | + # logfile.append(f"File: {filename} could not be uploaded. Review logs.") |
| 154 | + # logfile.append(f"An exception occurred: {str(e)}") |
| 155 | + # with open(filename + '.upload.log', 'w', encoding = "utf-8") as writer: |
| 156 | + # for i in logfile: |
| 157 | + # writer.write(i) |
| 158 | + # writer.write('\n') |
158 | 159 |
159 | 160 |
160 | 161 | # conn.commit() |
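
With the try/except commented out, a failure in any nu.insert_* call now propagates uncaught and the failure log is never written. A minimal sketch of how the guard could be restored once debugging is done, assuming `conn` is the psycopg2 connection behind `cur` (only `cur` and the commented-out `# conn.commit()` appear in this hunk, so that name is an assumption) and using a hypothetical run_inserts() wrapper standing in for the nu.insert_* sequence above:

try:
    run_inserts(cur, yml_dict, csv_template, uploader, logfile)  # hypothetical wrapper for the insert steps
    conn.commit()  # persist only after every step succeeds (assumption: conn produced cur)
except Exception as e:
    conn.rollback()  # drop partial inserts so a failed upload leaves no orphan rows
    logfile.append(f"File: {filename} could not be uploaded. Review logs.")
    logfile.append(f"An exception occurred: {str(e)}")
finally:
    # write the log exactly once, on success or failure
    with open(filename + '.upload.log', 'w', encoding="utf-8") as writer:
        for i in logfile:
            writer.write(i)
            writer.write('\n')

A finally block also removes the duplicated log-writing code that sat in both the try and except branches of the removed version.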
|
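Separately, csv_valid is computed at the top of the hunk but never consulted in this excerpt. If the validator reports a problem, skipping the inserts avoids a doomed upload; a minimal sketch, assuming csv_valid is truthy on success (its actual return type is not shown here):

if not csv_valid:
    logfile.append(f"File: {filename} failed CSV/YML validation; skipping upload.")
else:
    ...  # proceed with the nu.insert_* sequence shown in the diff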