Disable Sahana vulnerability module, bump SAMBRO and SHARE configs
parent 783fd3c094
commit b189f3c9a2
@@ -1412,12 +1412,13 @@ def config(settings):
             restricted = True,
             module_type = 10,
         )),
-        ("vulnerability", Storage(
-            name_nice = T("Vulnerability"),
-            #description = "Manages vulnerability indicators",
-            restricted = True,
-            module_type = 10,
-        )),
+        # https://github.com/sahana/eden/issues/1562
+        #("vulnerability", Storage(
+        # name_nice = T("Vulnerability"),
+        # #description = "Manages vulnerability indicators",
+        # restricted = True,
+        # module_type = 10,
+        #)),
         ("fire", Storage(
             name_nice = T("Fire Stations"),
             #description = "Fire Station Management",
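For context on the hunk above: in a Sahana Eden template's config.py, settings.modules is an OrderedDict of module definitions, and a module that is not listed there (here commented out, with a pointer to issue #1562) is simply never loaded. A minimal sketch of the same effect done programmatically from a downstream template; the function name disable_vulnerability_module is illustrative and not part of this commit:

    # Minimal sketch (assumption, not from this commit): drop the module from
    # the registry instead of commenting it out of the dict literal.
    def disable_vulnerability_module(settings):
        # settings.modules behaves like a dict; removing the key means the
        # module's models, menus and controllers are never enabled.
        settings.modules.pop("vulnerability", None)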
@@ -10,6 +10,7 @@ from gluon.html import *
 from gluon.storage import Storage
 from gluon.languages import lazyT
 
+from s3compat import BytesIO
 from s3 import FS, s3_str, s3_truncate, s3_utc
 
 def config(settings):
@@ -124,6 +125,8 @@ def config(settings):
     # Messaging
     # Parser
     settings.msg.parser = "SAMBRO"
+    # Subscriptions
+    settings.msg.notify_check_subscriptions = True
 
     # -------------------------------------------------------------------------
     # Organisations
@@ -340,9 +343,9 @@ def config(settings):
                         label = T("CAP OID"),
                         multiple = False,
                         fields = [("", "value")],
-                        filterby = dict(field = "tag",
-                                        options = "cap_oid",
-                                        ),
+                        filterby = {"field": "tag",
+                                    "options": "cap_oid",
+                                    },
                         ),
                        "website",
                        "comments",
@@ -1467,7 +1470,7 @@ T("""%(status)s %(message_type)s for %(area_description)s with %(priority)s prio
         ack_id = ack_table.insert(**ack_data)
         current.auth.s3_set_record_owner(ack_table, ack_id)
         # Uncomment this when there is onaccept hook
-        #s3db.onaccept(ack_table, dict(id=ack_id))
+        #s3db.onaccept(ack_table, {"id": ack_id})
 
         return ack_id
 
@@ -1513,61 +1516,66 @@ T("""%(status)s %(message_type)s for %(area_description)s with %(priority)s prio
     def _get_or_create_attachment(alert_id):
         """
             Retrieve the CAP attachment for the alert_id if present
-            else creates CAP file as attachment to be sent with the email
-            returns the document_id for the CAP file
+            else creates CAP file as attachment to be sent with the
+            email
+
+            @param alert_id: the cap_alert record ID
+
+            @returns: the doc_id of the CAP file
         """
 
         s3db = current.s3db
         rtable = s3db.cap_resource
         dtable = s3db.doc_document
 
+        # Check for existing CAP XML resource
         query = (rtable.alert_id == alert_id) & \
                 (rtable.mime_type == "cap") & \
                 (rtable.deleted != True) & \
                 (dtable.doc_id == rtable.doc_id) & \
                 (dtable.deleted != True)
-        row = current.db(query).select(dtable.id, limitby=(0, 1)).first()
-        if row and row.id:
-            return row.id
+        row = current.db(query).select(dtable.doc_id,
+                                       limitby = (0, 1),
+                                       ).first()
+        if row:
+            return row.doc_id
 
-        request = current.request
-        auth = current.auth
-        path_join = os.path.join
-
-        # Create the cap_resource table
+        # Create a CAP resource for the CAP XML file
         record = {"alert_id": alert_id,
                   "resource_desc": T("CAP XML File"),
-                  "mime_type": "cap" # Hard coded to separate from attachment from user
+                  "mime_type": "cap" # Hard-coded to separate from attachment from user
                   }
         resource_id = rtable.insert(**record)
 
+        # Post-process the CAP resource
         record["id"] = resource_id
         s3db.update_super(rtable, record)
         doc_id = record["doc_id"]
+        auth = current.auth
         auth.s3_set_record_owner(rtable, resource_id)
         auth.s3_make_session_owner(rtable, resource_id)
         s3db.onaccept("cap_resource", record, method="create")
 
-        resource = s3db.resource("cap_alert")
-        resource.add_filter(FS("id") == alert_id)
-        cap_xml = resource.export_xml(stylesheet=path_join(request.folder,
-                                                           "static",
-                                                           "formats",
-                                                           "cap",
-                                                           "export.xsl"),
-                                      pretty_print=True)
-        file_path = path_join(request.folder,
-                              "uploads",
-                              "%s_%s.xml" % ("cap_alert", str(alert_id)))
-        file = open(file_path, "w+")
-        file.write(cap_xml)
-        file.close()
-        # Create doc_document record
-        dtable = s3db.doc_document
-        file = open(file_path, "a+")
-        document_id = dtable.insert(**{"file": file, "doc_id": doc_id})
-        file.close()
-        os.remove(file_path)
+        # Generate the CAP XML
+        resource = s3db.resource("cap_alert", id=alert_id)
+        cap_xml = resource.export_xml(
+                    stylesheet = os.path.join(current.request.folder,
+                                              "static",
+                                              "formats",
+                                              "cap",
+                                              "export.xsl",
+                                              ),
+                    pretty_print = True,
+                    )
+
+        stream = BytesIO(cap_xml)
+        filename = "%s_%s.xml" % ("cap_alert", s3_str(alert_id))
+
+        # Store the CAP XML as doc_document for the CAP resource
+        document = {"file": dtable.file.store(stream, filename),
+                    "doc_id": doc_id,
+                    }
+        document_id = dtable.insert(**document)
 
         return document_id
 
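The rewritten _get_or_create_attachment helper above drops the temp-file round trip (open/write/close plus os.remove in the uploads folder) in favour of streaming the generated XML straight into the DAL upload field. A minimal standalone sketch of that pattern, assuming a web2py table with an upload field named file and that xml_bytes is already a bytes object; the function and variable names here are illustrative, not from the commit:

    from io import BytesIO

    def store_generated_file(dtable, doc_id, xml_bytes, filename):
        # Wrap the generated bytes so Field.store() can read them like a file
        stream = BytesIO(xml_bytes)
        # Field.store() copies the stream into the application's uploads
        # folder and returns the encoded name to keep in the upload column
        stored_name = dtable.file.store(stream, filename)
        # Point the new document row at the stored file
        return dtable.insert(file=stored_name, doc_id=doc_id)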
@@ -1647,6 +1647,7 @@ S3.redraw_fns.push('tagit')''' % (T("Add tags here…"),
 
     settings.tasks.homepage_stats_update = homepage_stats_update
 
+    # -------------------------------------------------------------------------
     def req_need_line_update_stats(r, **attr):
         """
             Method to manually update the data files for the charts
@@ -1664,7 +1665,7 @@ S3.redraw_fns.push('tagit')''' % (T("Add tags here…"),
 
         if not current.auth.s3_has_role("ADMIN"):
             # No, this is not open for everybody
-            r.unauthorized()
+            r.unauthorised()
         else:
            current.s3task.run_async("settings_task",
                                     args = ["homepage_stats_update"])