
merged PR #153 and fixed missing code from the PR
mdorf committed Feb 14, 2024
1 parent bf438b1 commit e7ed676
Showing 1 changed file with 37 additions and 37 deletions.
74 changes: 37 additions & 37 deletions lib/ontologies_linked_data/models/ontology_submission.rb
@@ -69,7 +69,7 @@ class OntologySubmission < LinkedData::Models::Base
# Links
links_load :submissionId, ontology: [:acronym]
link_to LinkedData::Hypermedia::Link.new("metrics", lambda {|s| "#{self.ontology_link(s)}/submissions/#{s.submissionId}/metrics"}, self.type_uri)
LinkedData::Hypermedia::Link.new("download", lambda {|s| "#{self.ontology_link(s)}/submissions/#{s.submissionId}/download"}, self.type_uri)
LinkedData::Hypermedia::Link.new("download", lambda {|s| "#{self.ontology_link(s)}/submissions/#{s.submissionId}/download"}, self.type_uri)

# HTTP Cache settings
cache_timeout 3600
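
Note: the link_to lambdas above build REST paths relative to the parent ontology. For a hypothetical submission 5 of an ontology with acronym NCIT, the "download" link would resolve to ontologies/NCIT/submissions/5/download under the REST base, the same ID pattern that submission_id_generator constructs below.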
@@ -120,7 +120,7 @@ def self.submission_id_generator(ss)
raise ArgumentError, "Submission cannot be saved if ontology does not have acronym"
end
return RDF::URI.new(
"#{(Goo.id_prefix)}ontologies/#{CGI.escape(ss.ontology.acronym.to_s)}/submissions/#{ss.submissionId.to_s}"
"#{(Goo.id_prefix)}ontologies/#{CGI.escape(ss.ontology.acronym.to_s)}/submissions/#{ss.submissionId.to_s}"
)
end

@@ -234,13 +234,13 @@ def sanity_check
if repeated_names.length > 0
names = repeated_names.keys.to_s
self.errors[:uploadFilePath] <<
"Zip file contains file names (#{names}) in more than one folder."
"Zip file contains file names (#{names}) in more than one folder."
return false
end

#error message with options to choose from.
self.errors[:uploadFilePath] << {
:message => "Zip file detected, choose the master file.", :options => files }
:message => "Zip file detected, choose the master file.", :options => files }
return false

elsif zip and not self.masterFileName.nil?
@@ -250,9 +250,9 @@ def sanity_check
if self.errors[:uploadFilePath].nil?
self.errors[:uploadFilePath] = []
self.errors[:uploadFilePath] << {
:message =>
"The selected file `#{self.masterFileName}` is not included in the zip file",
:options => files }
end
end
end
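
Note: when a zip upload has no masterFileName set, sanity_check pushes a hash (not a plain string) onto self.errors[:uploadFilePath], so an API client can prompt the user and resubmit with one of the offered files. A sketch of the resulting error entry, with hypothetical file names:

    { :message => "Zip file detected, choose the master file.",
      :options => ["ontology.owl", "imports/skos.owl"] }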
@@ -273,7 +273,7 @@ def zipped?(full_file_path = uploadFilePath)
end

def zip_folder
File.join([data_folder, 'unzipped'])
end

def csv_path
@@ -346,9 +346,9 @@ def diff(logger, older)

LinkedData::Diff.logger = logger
bubastis = LinkedData::Diff::BubastisDiffCommand.new(
File.expand_path(older.master_file_path),
File.expand_path(self.master_file_path),
data_folder
)
self.diffFilePath = bubastis.diff
save
@@ -449,7 +449,7 @@ def generate_umls_metrics_file(tr_file_path=nil)
self.generate_metrics_file(class_count, indiv_count, prop_count)
end

- def generate_rdf(logger, reasoning: true)
+ def generate_rdf(logger, file_path, reasoning: true)
mime_type = nil

if self.hasOntologyLanguage.umls?
@@ -471,11 +471,11 @@ def generate_rdf(logger, reasoning: true)
logger.info("error deleting owlapi.rdf")
end
end
- owlapi = owlapi_parser(logger: nil)
-
- if !reasoning
-   owlapi.disable_reasoner
- end
+ owlapi = LinkedData::Parser::OWLAPICommand.new(
+     File.expand_path(file_path),
+     File.expand_path(self.data_folder.to_s),
+     master_file: self.masterFileName)
+ owlapi.disable_reasoner unless reasoning
triples_file_path, missing_imports = owlapi.parse

if missing_imports && missing_imports.length > 0
@@ -502,7 +502,6 @@ def generate_rdf(logger, reasoning: true)
end
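
Note: this hunk restores the code that PR #153 left missing: generate_rdf now takes the (possibly unzipped) file path as an explicit parameter and builds the OWLAPI parser inline rather than calling the removed owlapi_parser helper. A minimal sketch of the new call pattern, mirroring the process_submission change further down (submission and logger here are placeholders):

    zip_dst = submission.unzip_submission(logger)  # nil unless the upload was a zip
    file_path = zip_dst ? zip_dst.to_s : submission.uploadFilePath.to_s
    submission.generate_rdf(logger, file_path, reasoning: true)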



def process_callbacks(logger, callbacks, action_name, &block)
callbacks.delete_if do |_, callback|
begin
@@ -528,6 +527,7 @@ def process_callbacks(logger, callbacks, action_name, &block)
end
end


def loop_classes(logger, raw_paging, callbacks)
page = 1
size = 2500
@@ -554,7 +554,7 @@ def loop_classes(logger, raw_paging, callbacks)
callbacks.each { |_, callback| callback[:artifacts] ||= {}; iterate_classes = true if callback[:caller_on_each] }

process_callbacks(logger, callbacks, :caller_on_pre) {
|callable, callback| callable.call(callback[:artifacts], logger, paging) }

page_len = -1
prev_page_len = -1
@@ -600,15 +600,15 @@ def loop_classes(logger, raw_paging, callbacks)
count_classes += page_classes.length

process_callbacks(logger, callbacks, :caller_on_pre_page) {
|callable, callback| callable.call(callback[:artifacts], logger, paging, page_classes, page) }

page_classes.each { |c|
process_callbacks(logger, callbacks, :caller_on_each) {
|callable, callback| callable.call(callback[:artifacts], logger, paging, page_classes, page, c) }
} if iterate_classes

process_callbacks(logger, callbacks, :caller_on_post_page) {
|callable, callback| callable.call(callback[:artifacts], logger, paging, page_classes, page) }
cls_count += page_classes.length unless cls_count_set

page = page_classes.next? ? page + 1 : nil
@@ -617,7 +617,7 @@ def loop_classes(logger, raw_paging, callbacks)

callbacks.each { |_, callback| callback[:artifacts][:count_classes] = cls_count }
process_callbacks(logger, callbacks, :caller_on_post) {
|callable, callback| callable.call(callback[:artifacts], logger, paging) }
end

logger.info("Completed #{operations}: #{acr} in #{time} sec. #{count_classes} classes.")
@@ -683,7 +683,7 @@ def generate_missing_labels_each(artifacts={}, logger, paging, page_classes, pag
label = LinkedData::Utils::Triples.last_iri_fragment c.id.to_s
end
artifacts[:label_triples] << LinkedData::Utils::Triples.label_for_class_triple(
c.id, Goo.vocabulary(:metadata_def)[:prefLabel], label)
prefLabel = label
else
prefLabel = c.prefLabel
@@ -694,10 +694,10 @@ def generate_missing_labels_each(artifacts={}, logger, paging, page_classes, pag

if loomLabel.length > 2
artifacts[:mapping_triples] << LinkedData::Utils::Triples.loom_mapping_triple(
c.id, Goo.vocabulary(:metadata_def)[:mappingLoom], loomLabel)
end
artifacts[:mapping_triples] << LinkedData::Utils::Triples.uri_mapping_triple(
c.id, Goo.vocabulary(:metadata_def)[:mappingSameURI], c.id)
end
end

@@ -758,7 +758,7 @@ def generate_obsolete_classes(logger, file_path)
self.bring(:obsoleteParent) if self.bring?(:obsoleteParent)
classes_deprecated = []
if self.obsoleteProperty &&
self.obsoleteProperty.to_s != "http://www.w3.org/2002/07/owl#deprecated"

predicate_obsolete = RDF::URI.new(self.obsoleteProperty.to_s)
query_obsolete_predicate = <<eos
@@ -776,16 +776,16 @@ def generate_obsolete_classes(logger, file_path)
if self.obsoleteParent.nil?
#try to find oboInOWL obsolete.
obo_in_owl_obsolete_class = LinkedData::Models::Class
.find(LinkedData::Utils::Triples.obo_in_owl_obsolete_uri)
.in(self).first
if obo_in_owl_obsolete_class
self.obsoleteParent = LinkedData::Utils::Triples.obo_in_owl_obsolete_uri
end
end
if self.obsoleteParent
class_obsolete_parent = LinkedData::Models::Class
.find(self.obsoleteParent)
.in(self).first
if class_obsolete_parent
descendents_obsolete = class_obsolete_parent.descendants
logger.info("Found #{descendents_obsolete.length} descendents of obsolete root #{self.obsoleteParent.to_s}")
@@ -806,9 +806,9 @@ def generate_obsolete_classes(logger, file_path)
end
fsave.close()
result = Goo.sparql_data_client.append_triples_from_file(
self.id,
save_in_file,
mime_type="application/x-turtle")
end
end

@@ -851,7 +851,7 @@ def remove_submission_status(status)
s.reject! { |stat|
stat_code = stat.get_code_from_id()
stat_code == status.get_code_from_id() ||
stat_code == status.get_error_status().get_code_from_id()
}
self.submissionStatus = s
end
@@ -1002,7 +1002,7 @@ def process_submission(logger, options={})
remove_submission_status(status) #remove RDF status before starting
zip_dst = unzip_submission(logger)
file_path = zip_dst ? zip_dst.to_s : self.uploadFilePath.to_s
- generate_rdf(logger, file_path, reasoning=reasoning)
+ generate_rdf(logger, file_path, reasoning: reasoning)
extract_metadata
add_submission_status(status)
self.save
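
Note: the generate_rdf call above is the substantive fix. In Ruby, reasoning=reasoning is not a keyword argument; it evaluates a local assignment and passes the result as a third positional argument, which cannot match the new signature of two positionals plus a reasoning: keyword. A standalone illustration, not taken from the file:

    def demo(logger, file_path, reasoning: true)
      reasoning
    end

    demo(nil, "x.owl", reasoning: false)   # => false; keyword passed as intended
    demo(nil, "x.owl", reasoning = false)  # ArgumentError: wrong number of arguments (given 3, expected 2)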
Expand Down Expand Up @@ -1583,7 +1583,7 @@ def master_file_path
path = if zipped?
File.join(self.zip_folder, self.masterFileName)
else
self.uploadFilePath
end
File.expand_path(path)
end
