1 | 1 | import json
2 | 2 | import requests
3 | | -import secrets
4 | 3 | import time
5 | 4 | import csv
6 | 5 | from datetime import datetime
7 | 6 | import urllib3
8 | 7 | import argparse
9 | 8 |
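| | +# choose the config module: a named secrets file edits production, the default edits stage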
10 | | -secretsVersion = input('To edit production server, enter the name of the secrets file: ') |
| 9 | +secretsVersion = input('To edit production server, enter the name of the \ |
| 10 | +secrets file: ') |
11 | 11 | if secretsVersion != '':
12 | 12 |     try:
13 | 13 |         secrets = __import__(secretsVersion)
14 | 14 |         print('Editing Production')
15 | 15 |     except ImportError:
| 16 | +        secrets = __import__('secrets')
16 | 17 |         print('Editing Stage')
17 | 18 | else:
| | +    secrets = __import__('secrets')
18 | 19 |     print('Editing Stage')
19 | 20 |
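| | +# optional command-line arguments; the script prompts for any value not supplied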
20 | 21 | parser = argparse.ArgumentParser() |
21 | | -parser.add_argument('-k', '--key', help='the key to be added. optional - if not provided, the script will ask for input') |
22 | | -parser.add_argument('-v', '--value', help='the value to be added. optional - if not provided, the script will ask for input') |
23 | | -parser.add_argument('-l', '--language', help='the language tag to be added. optional - if not provided, the script will ask for input') |
24 | | -parser.add_argument('-i', '--handle', help='handle of the collection. optional - if not provided, the script will ask for input') |
| 22 | +parser.add_argument('-k', '--key', help='the key to be added. optional - if \ |
| 23 | +not provided, the script will ask for input') |
| 24 | +parser.add_argument('-v', '--value', help='the value to be added. optional - \ |
| 25 | +if not provided, the script will ask for input') |
| 26 | +parser.add_argument('-l', '--language', help='the language tag to be added. \ |
| 27 | +optional - if not provided, the script will ask for input') |
| 28 | +parser.add_argument('-i', '--handle', help='handle of the collection. optional \ |
| 29 | +- if not provided, the script will ask for input') |
25 | 30 | args = parser.parse_args() |
26 | 31 |
27 | 32 | if args.key: |
51 | 56 | skippedCollections = secrets.skippedCollections |
52 | 57 |
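| | +# log in to the DSpace REST API and reuse the JSESSIONID cookie on every request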
53 | 58 | startTime = time.time() |
54 | | -data = {'email':email,'password':password} |
55 | | -header = {'content-type':'application/json','accept':'application/json'} |
56 | | -session = requests.post(baseURL+'/rest/login', headers=header, verify=verify, params=data).cookies['JSESSIONID'] |
| 59 | +data = {'email': email, 'password': password} |
| 60 | +header = {'content-type': 'application/json', 'accept': 'application/json'} |
| 61 | +session = requests.post(baseURL + '/rest/login', headers=header, |
| 62 | +                        verify=verify, params=data).cookies['JSESSIONID']
57 | 63 | cookies = {'JSESSIONID': session} |
58 | | -headerFileUpload = {'accept':'application/json'} |
| 64 | +headerFileUpload = {'accept': 'application/json'} |
59 | 65 |
60 | | -status = requests.get(baseURL+'/rest/status', headers=header, cookies=cookies, verify=verify).json() |
| 66 | +status = requests.get(baseURL + '/rest/status', headers=header, |
| 67 | +                      cookies=cookies, verify=verify).json()
61 | 68 | print('authenticated') |
62 | 69 |
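| | +# page through the collection 200 items at a time to gather every item UUID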
63 | 70 | itemList = [] |
64 | | -endpoint = baseURL+'/rest/handle/'+collectionHandle |
65 | | -collection = requests.get(endpoint, headers=header, cookies=cookies, verify=verify).json() |
| 71 | +endpoint = baseURL + '/rest/handle/' + handle |
| 72 | +collection = requests.get(endpoint, headers=header, cookies=cookies, |
| 73 | +                          verify=verify).json()
66 | 74 | collectionID = collection['uuid'] |
67 | 75 | offset = 0 |
68 | 76 | items = '' |
69 | 77 | while items != []: |
70 | | -    items = requests.get(baseURL+'/rest/collections/'+str(collectionID)+'/items?limit=200&offset='+str(offset), headers=header, cookies=cookies, verify=verify)
| 78 | +    items = requests.get(baseURL + '/rest/collections/' + str(collectionID)
| 79 | +                         + '/items?limit=200&offset=' + str(offset),
| 80 | +                         headers=header, cookies=cookies, verify=verify)
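| | +    # retry every five seconds until the REST API returns HTTP 200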
71 | 81 |     while items.status_code != 200:
72 | 82 |         time.sleep(5)
73 | | -        items = requests.get(baseURL+'/rest/collections/'+str(collectionID)+'/items?limit=200&offset='+str(offset), headers=header, cookies=cookies, verify=verify)
| 83 | +        items = requests.get(baseURL + '/rest/collections/' + str(collectionID)
| 84 | +                             + '/items?limit=200&offset=' + str(offset),
| 85 | +                             headers=header, cookies=cookies, verify=verify)
74 | 86 |     items = items.json()
75 | | -    for k in range (0, len (items)):
| 87 | +    for k in range(0, len(items)):
76 | 88 |         itemID = items[k]['uuid']
77 | 89 |         itemList.append(itemID)
78 | 90 |     offset = offset + 200
79 | 91 | elapsedTime = time.time() - startTime |
80 | 92 | m, s = divmod(elapsedTime, 60) |
81 | 93 | h, m = divmod(m, 60) |
82 | | -print('Item list creation time: ','%d:%02d:%02d' % (h, m, s)) |
| 94 | +print('Item list creation time: ', '%d:%02d:%02d' % (h, m, s)) |
83 | 95 |
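| | +# log each edit to a timestamped CSV while rewriting each item's metadata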
84 | 96 | recordsEdited = 0 |
85 | | -f=csv.writer(open(filePath+'addKeyValuePair'+datetime.now().strftime('%Y-%m-%d %H.%M.%S')+'.csv', 'w')) |
86 | | -f.writerow(['itemID']+['addedKey']+['addedValue']+['delete']+['post']) |
| 97 | +f = csv.writer(open(filePath + 'addKeyValuePair' |
| 98 | +                    + datetime.now().strftime('%Y-%m-%d %H.%M.%S') + '.csv', 'w'))
| 99 | +f.writerow(['itemID'] + ['addedKey'] + ['addedValue'] + ['delete'] + ['post']) |
87 | 100 | for number, itemID in enumerate(itemList): |
88 | 101 |     itemsRemaining = len(itemList) - number
89 | 102 |     print('Items remaining: ', itemsRemaining, 'ItemID: ', itemID)
90 | | -    metadata = requests.get(baseURL+'/rest/items/'+str(itemID)+'/metadata', headers=header, cookies=cookies, verify=verify).json()
| 103 | +    metadata = requests.get(baseURL + '/rest/items/' + str(itemID)
| 104 | +                            + '/metadata', headers=header, cookies=cookies,
| 105 | +                            verify=verify).json()
91 | 106 |     itemMetadataProcessed = []
92 | | -    for l in range (0, len (metadata)):
| 107 | +    for l in range(0, len(metadata)):
93 | 108 |         metadata[l].pop('schema', None)
94 | 109 |         metadata[l].pop('element', None)
95 | 110 |         metadata[l].pop('qualifier', None)
99 | 114 |     addedMetadataElement['value'] = addedValue
100 | 115 |     addedMetadataElement['language'] = addedLanguage
101 | 116 |     itemMetadataProcessed.append(addedMetadataElement)
102 | | -    provNote = '\''+addedKey+': '+addedValue+'\' was added through a batch process on '+datetime.now().strftime('%Y-%m-%d %H:%M:%S')+'.'
| 117 | +    provNote = '\'' + addedKey + ': ' + addedValue + '\' was added through a \
| 118 | +batch process on ' + datetime.now().strftime('%Y-%m-%d %H:%M:%S') + '.'
103 | 119 |     provNoteElement = {}
104 | 120 |     provNoteElement['key'] = 'dc.description.provenance'
105 | 121 |     provNoteElement['value'] = provNote
108 | 124 |     recordsEdited = recordsEdited + 1
109 | 125 |     itemMetadataProcessed = json.dumps(itemMetadataProcessed)
110 | 126 |     print('updated', itemID, recordsEdited)
111 | | -    delete = requests.delete(baseURL+'/rest/items/'+str(itemID)+'/metadata', headers=header, cookies=cookies, verify=verify)
| 127 | +    delete = requests.delete(baseURL + '/rest/items/' + str(itemID)
| 128 | +                             + '/metadata', headers=header, cookies=cookies,
| 129 | +                             verify=verify)
112 | 130 |     print(delete)
113 | | -    post = requests.put(baseURL+'/rest/items/'+str(itemID)+'/metadata', headers=header, cookies=cookies, verify=verify, data=itemMetadataProcessed)
| 131 | +    post = requests.put(baseURL + '/rest/items/' + str(itemID) + '/metadata',
| 132 | +                        headers=header, cookies=cookies, verify=verify,
| 133 | +                        data=itemMetadataProcessed)
114 | 134 |     print(post)
115 | | -    f.writerow([itemID]+[addedKey]+[addedValue]+[delete]+[post])
| 135 | +    f.writerow([itemID] + [addedKey] + [addedValue] + [delete] + [post])
116 | 136 |
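| | +# end the authenticated session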
117 | | -logout = requests.post(baseURL+'/rest/logout', headers=header, cookies=cookies, verify=verify) |
| 137 | +logout = requests.post(baseURL + '/rest/logout', headers=header, |
| 138 | +                       cookies=cookies, verify=verify)
118 | 139 |
119 | 140 | elapsedTime = time.time() - startTime |
120 | 141 | m, s = divmod(elapsedTime, 60) |