-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* fix: add missing ECS folder * missing file
- Loading branch information
Showing
5 changed files
with
358 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
# Makefile for bundling the application for ECS deployment.
# Pulls shared variables/macros (e.g. copy_client_assets) from common.mk.
include ../../common.mk

# Remove build artifacts produced by the bundling step.
.PHONY: clean
clean:
	rm -f artifact.zip
	rm -rf artifact.dir

# Bundles the application so that it can be dockerized and run in ECS

cwd := $(shell pwd)

# Copy the built client assets into the server's web tree so the Docker image
# can serve them. NOTE(review): the recipe ends with "; \" — the continuation
# has no following line; harmless, but looks like a truncated recipe. Confirm.
.PHONY: build-ecs
build-ecs: clean
	$(call copy_client_assets,../../client/build,../../server/common/web) ; \
	cp ../../client/build/index.html ../../server/common/web/templates/index.html ; \
	cp -r ../../client/build/* ../../server/common/web/. ; \
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,205 @@ | ||
import base64 | ||
import hashlib | ||
import logging | ||
import os | ||
import sys | ||
|
||
from flask import json | ||
from flask_cors import CORS | ||
from flask_talisman import Talisman | ||
from logging.config import dictConfig | ||
from urllib.parse import urlparse | ||
|
||
# Make the server package importable regardless of the working directory the
# WSGI container launches this module from.
SERVERDIR = os.path.dirname(os.path.realpath(__file__))
sys.path.append(SERVERDIR)
|
||
|
||
# Route all application logging through Flask's WSGI error stream so records
# end up wherever the hosting WSGI server sends its error log. Root level is
# INFO so startup/configuration messages below are visible in production.
_LOGGING_CONFIG = {
    "version": 1,
    "formatters": {
        "default": {
            "format": "[%(asctime)s] %(levelname)s in %(module)s: %(message)s",
        }
    },
    "handlers": {
        "wsgi": {
            "class": "logging.StreamHandler",
            "stream": "ext://flask.logging.wsgi_errors_stream",
            "formatter": "default",
        }
    },
    "root": {"level": "INFO", "handlers": ["wsgi"]},
}
dictConfig(_LOGGING_CONFIG)
|
||
# Import the project modules under a guard: if anything in the server package
# fails to import, log the full traceback and exit non-zero so the WSGI
# container sees a hard startup failure instead of a half-initialized app.
try:
    from server.common.config.app_config import AppConfig
    from server.app.app import Server
    from server.common.utils.data_locator import DataLocator, discover_s3_region_name
except Exception:
    logging.critical("Exception importing server modules", exc_info=True)
    sys.exit(1)
|
||
|
||
class WSGIServer(Server):
    """Hosted (ECS) variant of the cellxgene Server.

    Adds production hardening on top of the base Server before any routes are
    registered: a Content-Security-Policy (including sha256 hashes for inline
    scripts), CORS for the configured web base URL, and flask-talisman
    HTTPS/frame protections.
    """

    def __init__(self, app_config):
        super().__init__(app_config)

    @staticmethod
    def _before_adding_routes(app, app_config):
        """Attach CSP, CORS and Talisman to *app* before routes are added."""
        script_hashes = WSGIServer.get_csp_hashes(app, app_config)
        server_config = app_config.server_config

        # add the api_base_url to the connect_src csp header.
        extra_connect_src = []
        api_base_url = server_config.get_api_base_url()
        if api_base_url:
            parse_api_base_url = urlparse(api_base_url)
            extra_connect_src = [f"{parse_api_base_url.scheme}://{parse_api_base_url.netloc}"]

        # Analytics endpoint allowed for both script loading and beacons.
        PLAUSIBLE_URL = "https://plausible.io"

        csp = {
            "default-src": ["'self'"],
            "connect-src": ["'self'", PLAUSIBLE_URL] + extra_connect_src,
            "script-src": ["'self'", "'unsafe-eval'", PLAUSIBLE_URL] + script_hashes,
            "style-src": ["'self'", "'unsafe-inline'"],
            "img-src": ["'self'", "https://cellxgene.cziscience.com", "data:"],
            "object-src": ["'none'"],
            "base-uri": ["'none'"],
            "frame-ancestors": ["'none'"],
        }

        # Outside debug mode, ask browsers to upgrade http:// subresources.
        if not app.debug:
            csp["upgrade-insecure-requests"] = ""

        # Merge operator-supplied CSP directives from the config file; scalar
        # values are wrapped in a list and appended to any existing directive.
        if server_config.app__csp_directives:
            for k, v in server_config.app__csp_directives.items():
                if not isinstance(v, list):
                    v = [v]
                csp[k] = csp.get(k, []) + v

        # Add the web_base_url to the CORS header
        web_base_url = server_config.get_web_base_url()
        if web_base_url:
            web_base_url_parse = urlparse(web_base_url)
            allowed_origins = [f"{web_base_url_parse.scheme}://{web_base_url_parse.netloc}"]
            # Staging additionally allows the canary host and localhost dev
            # servers (flask-cors treats the regex string as a pattern).
            if os.getenv("DEPLOYMENT_STAGE") in ["Staging", "staging"]:
                allowed_origins.extend(
                    [
                        "https://canary-cellxgene.dev.single-cell.czi.technology/",
                        r"^http://localhost:\d+",
                    ]
                )
            CORS(app, supports_credentials=True, origins=allowed_origins)

        Talisman(
            app,
            force_https=server_config.app__force_https,
            frame_options="DENY",
            content_security_policy=csp,
        )

    @staticmethod
    def load_static_csp_hashes(app):
        """Load precomputed script hashes from csp-hashes.json, if present.

        Returns a (possibly empty) list of "'<hash>'" strings ready for use in
        the script-src directive. A missing or malformed file yields [].
        """
        csp_hashes = None
        try:
            with app.open_resource("../common/web/csp-hashes.json") as f:
                csp_hashes = json.load(f)
        except FileNotFoundError:
            pass
        if not isinstance(csp_hashes, dict):
            csp_hashes = {}
        # Renamed loop variable: the original shadowed the builtin `hash`.
        script_hashes = [f"'{script_hash}'" for script_hash in csp_hashes.get("script-hashes", [])]
        if len(script_hashes) == 0:
            # NOTE(review): despite this message, no unsafe-inline fallback is
            # actually applied here — the CSP simply ends up without hashes.
            logging.error("Content security policy hashes are missing, falling back to unsafe-inline policy")

        return script_hashes

    @staticmethod
    def compute_inline_csp_hashes(app, app_config):
        """Compute sha256 CSP hashes for every configured inline script template."""
        dataset_configs = [app_config.default_dataset_config] + list(app_config.dataroot_config.values())
        hashes = []
        for dataset_config in dataset_configs:
            inline_scripts = dataset_config.app__inline_scripts
            for script in inline_scripts:
                with app.open_resource(f"../common/web/templates/{script}") as f:
                    content = f.read()
                    # we use jinja2 template include, which trims final newline if present.
                    # Guard against an empty script file: `content[-1]` on empty
                    # bytes would raise IndexError.
                    if content and content[-1] == 0x0A:
                        content = content[0:-1]
                    digest = base64.b64encode(hashlib.sha256(content).digest())
                    hashes.append(f"'sha256-{digest.decode('utf-8')}'")
        return hashes

    @staticmethod
    def get_csp_hashes(app, app_config):
        """Return static (prebuilt) plus inline (computed) script hashes."""
        script_hashes = WSGIServer.load_static_csp_hashes(app)
        script_hashes += WSGIServer.compute_inline_csp_hashes(app, app_config)
        return script_hashes
|
||
|
||
# Module-level initialization: build the AppConfig from (in priority order)
# ./config.yaml, then $CXG_CONFIG_FILE (which may be an S3 URL), apply env
# overrides, complete the config, and expose `application` for the WSGI
# container. Any failure aborts the process with a logged traceback.
try:
    app_config = AppConfig()

    has_config = False
    # config file: look first for "config.yaml" in the current working directory
    config_file = "config.yaml"
    config_location = DataLocator(config_file)
    if config_location.exists():
        with config_location.local_handle() as lh:
            logging.info(f"Configuration from {config_file}")
            app_config.update_from_config_file(lh)
            has_config = True

    else:
        # config file: second, use the CXG_CONFIG_FILE
        config_file = os.getenv("CXG_CONFIG_FILE")
        if config_file:
            # presumably CXG_CONFIG_FILE may point at S3, hence the region
            # discovery before constructing the locator — TODO confirm.
            region_name = discover_s3_region_name(config_file)
            config_location = DataLocator(config_file, region_name)
            if config_location.exists():
                with config_location.local_handle() as lh:
                    logging.info(f"Configuration from {config_file}")
                    app_config.update_from_config_file(lh)
                    has_config = True
            else:
                # An explicitly named config file that is missing is fatal.
                logging.critical(f"Configuration file not found {config_file}")
                sys.exit(1)

    if not has_config:
        logging.critical("No config file found")
        sys.exit(1)

    # Environment override: CXG_DATAROOT takes precedence over the file value.
    dataroot = os.getenv("CXG_DATAROOT")
    if dataroot:
        logging.info("Configuration from CXG_DATAROOT")
        app_config.update_server_config(multi_dataset__dataroot=dataroot)

    # Hosted deployments only serve cxg-format matrices.
    app_config.update_server_config(
        multi_dataset__allowed_matrix_types=["cxg"],
    )

    # complete config
    app_config.complete_config(logging.info)

    # `application` is the WSGI callable the container looks for.
    server = WSGIServer(app_config)
    debug = False
    application = server.app

except Exception:
    logging.critical("Caught exception during initialization", exc_info=True)
    sys.exit(1)
|
||
# Log which serving mode the completed configuration selected.
if app_config.is_multi_dataset():
    logging.info(f"starting server with multi_dataset__dataroot={app_config.server_config.multi_dataset__dataroot}")
else:
    logging.info(f"starting server with single_dataset__datapath={app_config.server_config.single_dataset__datapath}")
|
||
if __name__ == "__main__":
    # Direct execution entry point for local runs; in ECS the module-level
    # `application` is served by the WSGI container instead.
    try:
        application.run(host=app_config.server_config.app__host, debug=debug, threaded=not debug, use_debugger=False)
    except Exception:
        # Fix: the original message said "during initialization" (copy-paste
        # from the init block above), but this exception occurs at runtime.
        logging.critical("Caught exception while running the application", exc_info=True)
        sys.exit(1)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,39 @@ | ||
import sys | ||
import argparse | ||
import yaml | ||
|
||
from server.common.config.app_config import AppConfig | ||
|
||
|
||
def main():
    """Validate a hosted cellxgene configuration file and report PASS/FAIL.

    Exits 0 when the file loads and completes cleanly, 1 otherwise. With
    --show, dumps the resolved configuration as YAML to stdout first.
    """
    arg_parser = argparse.ArgumentParser("A script to check hosted configuration files")
    arg_parser.add_argument("config_file", help="the configuration file")
    arg_parser.add_argument(
        "-s",
        "--show",
        default=False,
        action="store_true",
        help="print the configuration. NOTE: this may print secret values to stdout",
    )
    options = arg_parser.parse_args()

    config = AppConfig()
    try:
        config.update_from_config_file(options.config_file)
        config.complete_config()
    except Exception as err:
        print(f"Error: {str(err)}")
        print("FAIL:", options.config_file)
        sys.exit(1)

    if options.show:
        yaml.dump(config.config_to_dict(), sys.stdout)

    print("PASS:", options.config_file)
    sys.exit(0)
|
||
|
||
# Script entry point: delegate to main() so the module is importable.
if __name__ == "__main__":
    main()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,98 @@ | ||
"""This is a simple script to ensure the custom requirements.txt do not violate | ||
the server requirements.txt. A hosted cellxgene deployment may specify the exact | ||
version requirements on all the modules, and may add additional modules. | ||
This script is meant to aid in making that list of custom requirements easier to maintain. | ||
If cellxgene adds a new dependency, or changes the version requirements of an existing | ||
dependency, then this script can check if the custom requirements are still valid""" | ||
|
||
import sys | ||
import requirements | ||
from packaging.version import Version | ||
import pkg_resources | ||
|
||
|
||
def check(expected, custom):
    """checks that the custom requirements meet all the requirements of the expected requirements.
    The custom set of requirements may contain additional entries than expected.
    The requirements in custom must all be exact (==).
    An expected requirement must be present in custom, and must match all the specs
    for that requirement.
    expected : name of the expected requirement.txt file
    custom : name of the custom requirements.txt file
    """
    expected_reqs = parse_requirements(expected)
    custom_reqs = parse_requirements(custom)

    okay = True

    # Every custom requirement must be a single exact pin (==).
    for name, specs in custom_reqs.items():
        if len(specs) != 1 or specs[0][0] != "==":
            print(f"Error, spec must be an exact requirement {custom}: {name} {str(specs)}")
            okay = False

    # Every expected requirement must be present and satisfied by the pin.
    for name, specs in expected_reqs.items():
        if name not in custom_reqs:
            print(f"Error, missing requirement from {custom}: {name} {str(specs)}")
            okay = False
            continue

        pinned = Version(custom_reqs[name][0][1])
        for spec in specs:
            if not check_version(pinned, spec[0], Version(spec[1])):
                print(f"Error, failed requirement from {custom}: {name} {spec}, {pinned}")
                okay = False

    if okay:
        print("requirements check successful")
        sys.exit(0)
    sys.exit(1)
|
||
|
||
def parse_requirements(fname):
    """Read a requirements file and return a dict of modules name / specification"""
    try:
        with open(fname, "r") as fd:
            try:
                # pylint: disable=no-member
                parsed = {}
                for req in requirements.parse(fd):
                    parsed[req.name] = req.specs
            except pkg_resources.RequirementParseError:
                # Unparseable requirement lines are fatal for this tool.
                print(f"Unable to parse the requirements file: {fname}")
                sys.exit(1)
    except Exception as e:
        # Covers open() failures and any non-parse errors from iteration.
        print(f"Unable to open file {fname}: {str(e)}")
        sys.exit(1)

    return parsed
|
||
|
||
def check_version(cver, optype, ever):
    """
    Simple version check.
    Note: There is more complexity to comparing version (PEP440).
    However the use cases in cellxgene are limited, and do not require a general solution.
    """
    # Dispatch table replaces the original if/elif chain; semantics identical.
    comparators = {
        "==": lambda a, b: a == b,
        "!=": lambda a, b: a != b,
        ">=": lambda a, b: a >= b,
        ">": lambda a, b: a > b,
        "<=": lambda a, b: a <= b,
        "<": lambda a, b: a < b,
    }
    compare = comparators.get(optype)
    if compare is None:
        print(f"Error, optype not handled: {optype}")
        return False
    return compare(cver, ever)
|
||
|
||
# Usage: python check_requirements.py <expected-requirements.txt> <custom-requirements.txt>
if __name__ == "__main__":
    check(sys.argv[1], sys.argv[2])