Skip to content

Commit

Permalink
removed unnecessary prints; fixed type conversion bug in data_content_…
Browse files Browse the repository at this point in the history
…metadata
  • Loading branch information
huberrob committed Jul 16, 2021
1 parent c39f7e3 commit 0c96ccd
Show file tree
Hide file tree
Showing 4 changed files with 18 additions and 10 deletions.
3 changes: 1 addition & 2 deletions fuji_server/controllers/fair_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,6 @@ def uri_validator(u): # TODO integrate into request_helper.py
def set_remote_logging_target(self, host, path):
if host and path:
try:
print(host,path, self.test_id)
weblogger = logging.handlers.HTTPHandler(host,
path + '?testid=' + str(self.test_id), method='POST')
webformatter = logging.Formatter('%(levelname)s - %(message)s \r\n')
Expand Down Expand Up @@ -313,7 +312,7 @@ def retrieve_apis_standards(self):
else:
self.logger.info('FsF-R1.3-01M : Trying to retrieve metadata info from re3data/datacite services using client id -: '+str(client_id))
#find endpoint via datacite/re3data if pid is provided
print(client_id ,self.pid_scheme)
#print(client_id ,self.pid_scheme)
if client_id and self.pid_scheme:
repoHelper = RepositoryHelper(client_id, self.pid_scheme, logger= self.logger.name)
repoHelper.lookup_re3data()
Expand Down
2 changes: 1 addition & 1 deletion fuji_server/controllers/fair_object_controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ def assess_by_id(body): # noqa: E501
ft = FAIRCheck(uid=identifier, test_debug=debug, metadata_service_url = metadata_service_endpoint, metadata_service_type =metadata_service_type, use_datacite=usedatacite, oaipmh_endpoint =oaipmh_endpoint)
# set target for remote logging
remote_log_host, remote_log_path = Preprocessor.remote_log_host, Preprocessor.remote_log_path
print(remote_log_host, remote_log_path)
#print(remote_log_host, remote_log_path)
if remote_log_host and remote_log_path:
ft.set_remote_logging_target(remote_log_host, remote_log_path)
uid_result, pid_result = ft.check_unique_persistent()
Expand Down
20 changes: 16 additions & 4 deletions fuji_server/evaluators/fair_evaluator_data_content_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,11 +185,23 @@ def evaluate(self):
elif d == 'size':
if tika_content_size == 0:
self.logger.warning('{0} : Could not verify content size (received: 0 bytes) from downloaded file'.format(self.metric_identifier))
elif int(data_object.get('size')) == int(tika_content_size):
matches_content = True
matches_size = True
else:
self.logger.warning('{0} : Could not verify content size from downloaded file -: (expected: {1}, found: {2})'.format(self.metric_identifier, str(data_object.get('size')), str(tika_content_size) ))
#print(type(data_object.get('size')))
try:
object_size=int(float(data_object.get('size')))
if object_size == tika_content_size:
matches_content = True
matches_size = True
else:
self.logger.warning(
'{0} : Could not verify content size from downloaded file -: (expected: {1}, found: {2})'.format(
self.metric_identifier, str(data_object.get('size')),
str(tika_content_size)))

except Exception as e:
self.logger.warning(
'{0} : Could not verify content size from downloaded file -: (expected: {1}, found: {2})'.format(
self.metric_identifier, str(data_object.get('size')), str(tika_content_size)))

data_content_filetype_inner = DataContentMetadataOutputInner()
data_content_filetype_inner.descriptor = descriptor
Expand Down
3 changes: 0 additions & 3 deletions fuji_server/helper/metadata_collector_rdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,6 @@ def get_default_metadata(self,g):
else:
meta[l] = str(v)
break
print(meta['related_resources'])
else:
self.logger.info('FsF-F2-01M : Graph seems to contain only one triple, skipping core metadata element test')
except Exception as e:
Expand All @@ -130,7 +129,6 @@ def get_metadata(self,g, item, type='Dataset'):
meta = dict()
#default sparql
met = self.get_default_metadata(item)
print(met)
meta['object_identifier'] = (g.value(item, DC.identifier) or g.value(item, DCTERMS.identifier))
'''
if self.source_name != self.getEnumSourceNames().RDFA.value:
Expand Down Expand Up @@ -160,7 +158,6 @@ def get_metadata(self,g, item, type='Dataset'):
for v in [meta['title'],meta['summary'], meta['publisher']]:
if v:
v = v.toPython()
print(meta)
return meta

def get_ontology_metadata(self, graph):
Expand Down

0 comments on commit 0c96ccd

Please sign in to comment.