From af70730c12469b43098f8fd7ba48f7c4f804d02c Mon Sep 17 00:00:00 2001 From: stevekaplan123 Date: Sun, 5 Nov 2023 09:10:21 +0200 Subject: [PATCH 01/52] chore: started fixing failing tests --- sefaria/helper/tests/topic_test.py | 21 +++++++++++++-------- sefaria/helper/topic.py | 7 ++++--- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/sefaria/helper/tests/topic_test.py b/sefaria/helper/tests/topic_test.py index 66a635f394..b5d883d114 100644 --- a/sefaria/helper/tests/topic_test.py +++ b/sefaria/helper/tests/topic_test.py @@ -83,31 +83,36 @@ def child_of_root_wout_self_link(root_wout_self_link): def test_title_and_desc(root_wout_self_link, child_of_root_wout_self_link, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link): for t in [root_wout_self_link, child_of_root_wout_self_link, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link]: - new_values = {"title": "new title", "heTitle": "new hebrew title", "description": {"en": "desc", "he": "hebrew desc"}} + new_values = {"title": "new title", + "altTitles": {"en": ["New Alt title 1"], "he": ["New He Alt Title 1"]}, + "heTitle": "new hebrew title", "description": {"en": "new desc", "he": "new hebrew desc"}} update_topic(t["topic"], **new_values) + t["topic"] = Topic.init(t["topic"].slug) assert t["topic"].description == new_values["description"] - assert t["topic"].get_primary_title('en') == new_values['title'] assert t["topic"].get_primary_title('he') == new_values['heTitle'] + assert t["topic"].get_titles('en') == ['New Alt title 1', 'new title'] def test_change_categories_and_titles(root_wout_self_link, root_with_self_link): # tests moving both root categories down the tree and back up and asserting that moving down the tree changes the tree # and assert that moving it back to the root position yields the original tree. 
- # also tests - orig_tree_from_normal_root = library.get_topic_toc_json_recursive(root_wout_self_link["topic"]) orig_tree_from_root_with_self_link = library.get_topic_toc_json_recursive(root_with_self_link["topic"]) orig_trees = [orig_tree_from_normal_root, orig_tree_from_root_with_self_link] roots = [root_wout_self_link["topic"], root_with_self_link["topic"]] orig_titles = [roots[0].get_primary_title('en'), roots[1].get_primary_title('en')] + orig_he_titles = [roots[0].get_primary_title('he'), roots[1].get_primary_title('he')] for i, root in enumerate(roots): other_root = roots[1 - i] - update_topic(root, title=f"fake new title {i+1}", category=other_root.slug) # move root to be child of other root + update_topic(root, title=f"fake new title {i+1}", heTitle=f"fake new he title {i+1}", category=other_root.slug) # move root to be child of other root new_tree = library.get_topic_toc_json_recursive(other_root) assert new_tree != orig_trees[i] # assert that the changes in the tree have occurred - assert root.get_primary_title('en') != orig_titles[i] - update_topic(root, title=orig_titles[i], category=Topic.ROOT) # move it back to the main menu - assert root.get_primary_title('en') == orig_titles[i] + assert root.get_titles('en') != [orig_titles[i]] + assert root.get_titles('he') != [orig_he_titles[i]] + update_topic(root, title=orig_titles[i], heTitle=orig_he_titles[i], category=Topic.ROOT) # move it back to the main menu + assert root.get_titles('en') == [orig_titles[i]] + assert root.get_titles('he') == [orig_he_titles[i]] + final_tree_from_normal_root = library.get_topic_toc_json_recursive(roots[0]) final_tree_from_root_with_self_link = library.get_topic_toc_json_recursive(roots[1]) diff --git a/sefaria/helper/topic.py b/sefaria/helper/topic.py index a18f15cea0..53a24e22fe 100644 --- a/sefaria/helper/topic.py +++ b/sefaria/helper/topic.py @@ -1046,8 +1046,9 @@ def update_topic_titles(topic_obj, data): for lang in ['en', 'he']: for title in topic_obj.get_titles(lang): topic_obj.remove_title(title, lang) - for title in data['altTitles'][lang]: - topic_obj.add_title(title, lang) + if 'altTitles' in data: + for title in data['altTitles'][lang]: + topic_obj.add_title(title, lang) topic_obj.add_title(data['title'], 'en', True, True) topic_obj.add_title(data['heTitle'], 'he', True, True) @@ -1099,7 +1100,7 @@ def update_topic(topic_obj, **kwargs): if kwargs.get('category') == 'authors': topic_obj = update_authors_place_and_time(topic_obj, kwargs) - if 'category' in kwargs and kwargs['category'] != kwargs['origCategory']: + if 'category' in kwargs and kwargs['category'] != kwargs.get('origCategory', kwargs['category']): orig_link = IntraTopicLink().load({"linkType": "displays-under", "fromTopic": topic_obj.slug, "toTopic": {"$ne": topic_obj.slug}}) old_category = orig_link.toTopic if orig_link else Topic.ROOT if old_category != kwargs['category']: From 4f1620138bcdac8ff4764eb577c76310731dc0bd Mon Sep 17 00:00:00 2001 From: saengel Date: Sun, 5 Nov 2023 12:31:37 +0200 Subject: [PATCH 02/52] feat(Backend topic images): Add optional attribute for image --- sefaria/model/topic.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sefaria/model/topic.py b/sefaria/model/topic.py index b259b88311..dbc865f83b 100644 --- a/sefaria/model/topic.py +++ b/sefaria/model/topic.py @@ -43,7 +43,8 @@ class Topic(abst.SluggedAbstractMongoRecord, AbstractTitledObject): 'good_to_promote', 'description_published', # bool to keep track of which descriptions we've vetted 'isAmbiguous', # True if topic 
primary title can refer to multiple other topics - "data_source" #any topic edited manually should display automatically in the TOC and this flag ensures this + "data_source", #any topic edited manually should display automatically in the TOC and this flag ensures this + 'image' ] ROOT = "Main Menu" # the root of topic TOC is not a topic, so this is a fake slug. we know it's fake because it's not in normal form # this constant is helpful in the topic editor tool functions in this file From da3dafa0f9cea71cfc5b79e4c4ec5610be695e48 Mon Sep 17 00:00:00 2001 From: stevekaplan123 Date: Sun, 5 Nov 2023 13:14:41 +0200 Subject: [PATCH 03/52] fix(Topic Tests): fix 2 failing tests --- sefaria/helper/tests/topic_test.py | 11 +++++------ sefaria/helper/topic.py | 9 +++++---- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/sefaria/helper/tests/topic_test.py b/sefaria/helper/tests/topic_test.py index b5d883d114..4b33cd8c70 100644 --- a/sefaria/helper/tests/topic_test.py +++ b/sefaria/helper/tests/topic_test.py @@ -82,15 +82,14 @@ def child_of_root_wout_self_link(root_wout_self_link): def test_title_and_desc(root_wout_self_link, child_of_root_wout_self_link, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link): - for t in [root_wout_self_link, child_of_root_wout_self_link, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link]: - new_values = {"title": "new title", - "altTitles": {"en": ["New Alt title 1"], "he": ["New He Alt Title 1"]}, - "heTitle": "new hebrew title", "description": {"en": "new desc", "he": "new hebrew desc"}} + for count, t in enumerate([root_wout_self_link, child_of_root_wout_self_link, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link]): + new_values = {"title": f"new title {count+1}", + "altTitles": {"en": [f"New Alt title {count+1}"], "he": [f"New He Alt Title {count+1}"]}, + "heTitle": f"new hebrew title {count+1}", "description": {"en": f"new desc", "he": "new hebrew desc"}} update_topic(t["topic"], **new_values) - t["topic"] = Topic.init(t["topic"].slug) assert t["topic"].description == new_values["description"] assert t["topic"].get_primary_title('he') == new_values['heTitle'] - assert t["topic"].get_titles('en') == ['New Alt title 1', 'new title'] + assert t["topic"].get_titles('en') == [new_values["title"]]+new_values["altTitles"]['en'] def test_change_categories_and_titles(root_wout_self_link, root_with_self_link): diff --git a/sefaria/helper/topic.py b/sefaria/helper/topic.py index 53a24e22fe..126607fcb3 100644 --- a/sefaria/helper/topic.py +++ b/sefaria/helper/topic.py @@ -1043,15 +1043,16 @@ def topic_change_category(topic_obj, new_category, old_category="", rebuild=Fals return topic_obj def update_topic_titles(topic_obj, data): - for lang in ['en', 'he']: + new_primary = {"en": data['title'], "he": data["heTitle"]} + for lang in ['en', 'he']: # first add new primary titles then remove old alt titles and add new alt titles + topic_obj.add_title(new_primary[lang], lang, True, True) for title in topic_obj.get_titles(lang): - topic_obj.remove_title(title, lang) + if title != new_primary[lang]: + topic_obj.remove_title(title, lang) if 'altTitles' in data: for title in data['altTitles'][lang]: topic_obj.add_title(title, lang) - topic_obj.add_title(data['title'], 'en', True, True) - topic_obj.add_title(data['heTitle'], 'he', True, True) return topic_obj From 74d5c1f9755e4bc4380538ce22db90647f0e6c15 Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Mon, 6 Nov 2023 
16:52:40 +0200 Subject: [PATCH 04/52] feat(DisplaySettingsButton): change button to A for English interface and aleph for Hebrew interface --- static/css/s2.css | 10 ++++++++++ static/js/Misc.jsx | 14 +++++++++++--- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/static/css/s2.css b/static/css/s2.css index 128071435d..3976351fc7 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -4702,6 +4702,16 @@ body .ui-autocomplete.dictionary-toc-autocomplete .ui-menu-item a.ui-state-focus display: inline-block; cursor: pointer; } +.readerOptions .int-en { + font-size: 25px; + line-height: 63px; + margin-right: 8px; +} +.readerOptions .int-he { + font-size: 28px; + line-height: 67px; + margin-left: 6px; +} .rightButtons .readerOptions { vertical-align: middle; } diff --git a/static/js/Misc.jsx b/static/js/Misc.jsx index 7468ab72f4..65a7513316 100644 --- a/static/js/Misc.jsx +++ b/static/js/Misc.jsx @@ -1343,9 +1343,17 @@ class CloseButton extends Component { class DisplaySettingsButton extends Component { render() { var style = this.props.placeholder ? {visibility: "hidden"} : {}; - var icon = Sefaria._siteSettings.TORAH_SPECIFIC ? - Toggle Reader Menu Display Settings : - Aa; + var icon; + + if (Sefaria._siteSettings.TORAH_SPECIFIC) { + icon = + + A + א + ; + } else { + icon = Aa; + } return ( Date: Mon, 6 Nov 2023 17:16:59 +0200 Subject: [PATCH 05/52] feat(css): changed color of saveButton and displayTextButton to darker grey --- static/css/s2.css | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/static/css/s2.css b/static/css/s2.css index 3976351fc7..b5a4d0f3db 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -4653,6 +4653,7 @@ body .ui-autocomplete.dictionary-toc-autocomplete .ui-menu-item a.ui-state-focus height: 18px; width: 18px; margin-top: 3px; + filter: grayscale(100%) brightness(80%) sepia(20%); } .rightButtons .saveButton.tooltip-toggle::before { top: 47px; @@ -4697,7 +4698,7 @@ body .ui-autocomplete.dictionary-toc-autocomplete .ui-menu-item a.ui-state-focus width: 40px; height: 56px; line-height: 56px; - color: #999; + color: #666666; font-size: 20px; display: inline-block; cursor: pointer; From a005786221e3b63c984e988b7ff5f97e69564e09 Mon Sep 17 00:00:00 2001 From: stevekaplan123 Date: Tue, 7 Nov 2023 11:01:55 +0200 Subject: [PATCH 06/52] fix(Title Group): _primary_title[lang] set to None on remove_title of a primary title --- reader/views.py | 10 ++--- sefaria/helper/tests/topic_test.py | 46 +++++++++++--------- sefaria/helper/topic.py | 68 ++++++++++++++---------------- sefaria/model/place.py | 8 ++-- sefaria/model/schema.py | 3 ++ 5 files changed, 71 insertions(+), 64 deletions(-) diff --git a/reader/views.py b/reader/views.py index 385a569c2a..8205705dd3 100644 --- a/reader/views.py +++ b/reader/views.py @@ -3103,14 +3103,14 @@ def add_new_topic_api(request): data = json.loads(request.POST["json"]) isTopLevelDisplay = data["category"] == Topic.ROOT t = Topic({'slug': "", "isTopLevelDisplay": isTopLevelDisplay, "data_source": "sefaria", "numSources": 0}) - update_topic_titles(t, data) + update_topic_titles(t, **data) if not isTopLevelDisplay: # not Top Level so create an IntraTopicLink to category new_link = IntraTopicLink({"toTopic": data["category"], "fromTopic": t.slug, "linkType": "displays-under", "dataSource": "sefaria"}) new_link.save() if data["category"] == 'authors': - t = update_authors_place_and_time(t, data) + t = update_authors_place_and_time(t, **data) t.description_published = True t.data_source = "sefaria" # any topic 
edited manually should display automatically in the TOC and this flag ensures this @@ -3165,15 +3165,15 @@ def topics_api(request, topic, v2=False): if not request.user.is_staff: return jsonResponse({"error": "Adding topics is locked.
Please email hello@sefaria.org if you believe edits are needed."}) topic_data = json.loads(request.POST["json"]) - topic_obj = Topic().load({'slug': topic_data["origSlug"]}) + topic = Topic().load({'slug': topic_data["origSlug"]}) topic_data["manual"] = True author_status_changed = (topic_data["category"] == "authors") ^ (topic_data["origCategory"] == "authors") - topic_obj = update_topic(topic_obj, **topic_data) + topic = update_topic(topic, **topic_data) if author_status_changed: library.build_topic_auto_completer() def protected_index_post(request): - return jsonResponse(topic_obj.contents()) + return jsonResponse(topic.contents()) return protected_index_post(request) diff --git a/sefaria/helper/tests/topic_test.py b/sefaria/helper/tests/topic_test.py index 4b33cd8c70..526bc88f5d 100644 --- a/sefaria/helper/tests/topic_test.py +++ b/sefaria/helper/tests/topic_test.py @@ -52,10 +52,10 @@ def grandchild_of_root_with_self_link(child_of_root_with_self_link): @pytest.fixture(autouse=True, scope='module') -def root_wout_self_link(): - # create second branch of tree starting with root_wout_self_link +def author_root(): + # create second branch of tree starting with author_root t = Topic({'slug': "", "isTopLevelDisplay": True, "data_source": "sefaria", "numSources": 0}) - title = "Normal Root" + title = "Authors" he_title = title[::-1] t.add_primary_titles(title, he_title) t.set_slug_to_primary_title() @@ -66,23 +66,23 @@ def root_wout_self_link(): @pytest.fixture(autouse=True, scope='module') -def child_of_root_wout_self_link(root_wout_self_link): +def actual_author(author_root): t = Topic({'slug': "", "isTopLevelDisplay": False, "data_source": "sefaria", "numSources": 0}) - title = "Normal Root Leaf Node" + title = "Author Dude" he_title = title[::-1] t.add_primary_titles(title, he_title) t.set_slug_to_primary_title() t.save() l = IntraTopicLink({"linkType": "displays-under", "fromTopic": t.slug, - "toTopic": root_wout_self_link["topic"].slug, "dataSource": "sefaria", - "class": "intraTopic"}).save() # root_wout_self_link has child leaf_node + "toTopic": author_root["topic"].slug, "dataSource": "sefaria", + "class": "intraTopic"}).save() # author_root has child leaf_node yield {"topic": t, "link": l} t.delete() l.delete() -def test_title_and_desc(root_wout_self_link, child_of_root_wout_self_link, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link): - for count, t in enumerate([root_wout_self_link, child_of_root_wout_self_link, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link]): +def test_title_and_desc(author_root, actual_author, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link): + for count, t in enumerate([author_root, actual_author, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link]): new_values = {"title": f"new title {count+1}", "altTitles": {"en": [f"New Alt title {count+1}"], "he": [f"New He Alt Title {count+1}"]}, "heTitle": f"new hebrew title {count+1}", "description": {"en": f"new desc", "he": "new hebrew desc"}} @@ -91,14 +91,22 @@ def test_title_and_desc(root_wout_self_link, child_of_root_wout_self_link, root_ assert t["topic"].get_primary_title('he') == new_values['heTitle'] assert t["topic"].get_titles('en') == [new_values["title"]]+new_values["altTitles"]['en'] +def test_author_root(author_root, actual_author): + new_values = {"category": "authors", "title": actual_author["topic"].get_primary_title('en'), + "heTitle": 
actual_author["topic"].get_primary_title('he'), + "birthPlace": "Kyoto, Japan", "birthYear": 1300} + assert Place().load({'key': new_values["birthPlace"]}) is None + update_topic(actual_author["topic"], **new_values) + assert Place().load({'key': new_values["birthPlace"]}) + assert actual_author["topic"].properties["birthYear"]["value"] == 1300 -def test_change_categories_and_titles(root_wout_self_link, root_with_self_link): +def test_change_categories_and_titles(author_root, root_with_self_link): # tests moving both root categories down the tree and back up and asserting that moving down the tree changes the tree # and assert that moving it back to the root position yields the original tree. - orig_tree_from_normal_root = library.get_topic_toc_json_recursive(root_wout_self_link["topic"]) + orig_tree_from_normal_root = library.get_topic_toc_json_recursive(author_root["topic"]) orig_tree_from_root_with_self_link = library.get_topic_toc_json_recursive(root_with_self_link["topic"]) orig_trees = [orig_tree_from_normal_root, orig_tree_from_root_with_self_link] - roots = [root_wout_self_link["topic"], root_with_self_link["topic"]] + roots = [author_root["topic"], root_with_self_link["topic"]] orig_titles = [roots[0].get_primary_title('en'), roots[1].get_primary_title('en')] orig_he_titles = [roots[0].get_primary_title('he'), roots[1].get_primary_title('he')] for i, root in enumerate(roots): @@ -119,24 +127,24 @@ def test_change_categories_and_titles(root_wout_self_link, root_with_self_link): assert final_tree_from_root_with_self_link == orig_tree_from_root_with_self_link -def test_change_categories(root_wout_self_link, child_of_root_wout_self_link, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link): +def test_change_categories(author_root, actual_author, root_with_self_link, child_of_root_with_self_link, grandchild_of_root_with_self_link): # tests moving topics across the tree to a different root - orig_tree_from_normal_root = library.get_topic_toc_json_recursive(root_wout_self_link["topic"]) + orig_tree_from_normal_root = library.get_topic_toc_json_recursive(author_root["topic"]) orig_tree_from_root_with_self_link = library.get_topic_toc_json_recursive(root_with_self_link["topic"]) - topic_change_category(child_of_root_with_self_link["topic"], root_wout_self_link["topic"].slug) - topic_change_category(child_of_root_wout_self_link["topic"], root_with_self_link["topic"].slug) + topic_change_category(child_of_root_with_self_link["topic"], author_root["topic"].slug) + topic_change_category(actual_author["topic"], root_with_self_link["topic"].slug) - new_tree_from_normal_root = library.get_topic_toc_json_recursive(root_wout_self_link["topic"]) + new_tree_from_normal_root = library.get_topic_toc_json_recursive(author_root["topic"]) new_tree_from_root_with_self_link = library.get_topic_toc_json_recursive(root_with_self_link["topic"]) assert new_tree_from_normal_root != orig_tree_from_normal_root assert new_tree_from_root_with_self_link != orig_tree_from_root_with_self_link topic_change_category(child_of_root_with_self_link["topic"], root_with_self_link["topic"].slug) - topic_change_category(child_of_root_wout_self_link["topic"], root_wout_self_link["topic"].slug) + topic_change_category(actual_author["topic"], author_root["topic"].slug) - new_tree_from_normal_root = library.get_topic_toc_json_recursive(root_wout_self_link["topic"]) + new_tree_from_normal_root = library.get_topic_toc_json_recursive(author_root["topic"]) new_tree_from_root_with_self_link = 
library.get_topic_toc_json_recursive(root_with_self_link["topic"]) assert new_tree_from_normal_root == orig_tree_from_normal_root assert new_tree_from_root_with_self_link == orig_tree_from_root_with_self_link diff --git a/sefaria/helper/topic.py b/sefaria/helper/topic.py index 126607fcb3..837c34b4a5 100644 --- a/sefaria/helper/topic.py +++ b/sefaria/helper/topic.py @@ -1042,28 +1042,24 @@ def topic_change_category(topic_obj, new_category, old_category="", rebuild=Fals rebuild_topic_toc(topic_obj, category_changed=True) return topic_obj -def update_topic_titles(topic_obj, data): - new_primary = {"en": data['title'], "he": data["heTitle"]} +def update_topic_titles(topic, **kwargs): + new_primary = {"en": kwargs['title'], "he": kwargs["heTitle"]} for lang in ['en', 'he']: # first add new primary titles then remove old alt titles and add new alt titles - topic_obj.add_title(new_primary[lang], lang, True, True) - for title in topic_obj.get_titles(lang): - if title != new_primary[lang]: - topic_obj.remove_title(title, lang) - if 'altTitles' in data: - for title in data['altTitles'][lang]: - topic_obj.add_title(title, lang) - - return topic_obj + for title in topic.get_titles(lang): + topic.remove_title(title, lang) + topic.add_title(new_primary[lang], lang, True, False) + if 'altTitles' in kwargs: + for title in kwargs['altTitles'][lang]: + topic.add_title(title, lang) + return topic -def update_authors_place_and_time(topic_obj, data, dataSource='learning-team-editing-tool'): +def update_authors_place_and_time(topic, dataSource='learning-team-editing-tool', **kwargs): # update place info added to author, then update year and era info - if not hasattr(topic_obj, 'properties'): - topic_obj.properties = {} - process_topic_place_change(topic_obj, data) - topic_obj = update_author_era(topic_obj, data, dataSource=dataSource) - - return topic_obj + if not hasattr(topic, 'properties'): + topic.properties = {} + process_topic_place_change(topic, **kwargs) + return update_author_era(topic, **kwargs, dataSource=dataSource) def update_properties(topic_obj, dataSource, k, v): if v == '': @@ -1071,54 +1067,54 @@ def update_properties(topic_obj, dataSource, k, v): else: topic_obj.properties[k] = {'value': v, 'dataSource': dataSource} -def update_author_era(topic_obj, data, dataSource='learning-team-editing-tool'): +def update_author_era(topic_obj, dataSource='learning-team-editing-tool', **kwargs): for k in ["birthYear", "deathYear"]: - if k in data.keys(): # only change property value if key is in data, otherwise it indicates no change - year = data[k] + if k in kwargs.keys(): # only change property value if key exists, otherwise it indicates no change + year = kwargs[k] update_properties(topic_obj, dataSource, k, year) - if 'era' in data.keys(): # only change property value if key is in data, otherwise it indicates no change + if 'era' in kwargs.keys(): # only change property value if key is in data, otherwise it indicates no change prev_era = topic_obj.properties.get('era', {}).get('value') - era = data['era'] + era = kwargs['era'] update_properties(topic_obj, dataSource, 'era', era) if era != '': create_era_link(topic_obj, prev_era_to_delete=prev_era) return topic_obj -def update_topic(topic_obj, **kwargs): +def update_topic(topic, **kwargs): """ Can update topic object's title, hebrew title, category, description, and categoryDescription fields - :param topic_obj: (Topic) The topic to update + :param topic: (Topic) The topic to update :param **kwargs can be title, heTitle, category, description, 
categoryDescription, and rebuild_toc where `title`, `heTitle`, and `category` are strings. `description` and `categoryDescription` are dictionaries where the fields are `en` and `he`. The `category` parameter should be the slug of the new category. `rebuild_topic_toc` is a boolean and is assumed to be True :return: (model.Topic) The modified topic """ old_category = "" - orig_slug = topic_obj.slug - update_topic_titles(topic_obj, kwargs) + orig_slug = topic.slug + update_topic_titles(topic, **kwargs) if kwargs.get('category') == 'authors': - topic_obj = update_authors_place_and_time(topic_obj, kwargs) + topic = update_authors_place_and_time(topic, **kwargs) if 'category' in kwargs and kwargs['category'] != kwargs.get('origCategory', kwargs['category']): - orig_link = IntraTopicLink().load({"linkType": "displays-under", "fromTopic": topic_obj.slug, "toTopic": {"$ne": topic_obj.slug}}) + orig_link = IntraTopicLink().load({"linkType": "displays-under", "fromTopic": topic.slug, "toTopic": {"$ne": topic.slug}}) old_category = orig_link.toTopic if orig_link else Topic.ROOT if old_category != kwargs['category']: - topic_obj = topic_change_category(topic_obj, kwargs["category"], old_category=old_category) # can change topic and intratopiclinks + topic = topic_change_category(topic, kwargs["category"], old_category=old_category) # can change topic and intratopiclinks if kwargs.get('manual', False): - topic_obj.data_source = "sefaria" # any topic edited manually should display automatically in the TOC and this flag ensures this - topic_obj.description_published = True + topic.data_source = "sefaria" # any topic edited manually should display automatically in the TOC and this flag ensures this + topic.description_published = True if "description" in kwargs or "categoryDescription" in kwargs: - topic_obj.change_description(kwargs.get("description", None), kwargs.get("categoryDescription", None)) + topic.change_description(kwargs.get("description", None), kwargs.get("categoryDescription", None)) - topic_obj.save() + topic.save() if kwargs.get('rebuild_topic_toc', True): - rebuild_topic_toc(topic_obj, orig_slug=orig_slug, category_changed=(old_category != kwargs.get('category', ""))) - return topic_obj + rebuild_topic_toc(topic, orig_slug=orig_slug, category_changed=(old_category != kwargs.get('category', ""))) + return topic def rebuild_topic_toc(topic_obj, orig_slug="", category_changed=False): diff --git a/sefaria/model/place.py b/sefaria/model/place.py index a39bf0e610..a956f1aa7f 100644 --- a/sefaria/model/place.py +++ b/sefaria/model/place.py @@ -105,14 +105,14 @@ def process_index_place_change(indx, **kwargs): if kwargs['new'] != '': Place.create_new_place(en=kwargs['new'], he=he_new_val) -def process_topic_place_change(topic_obj, data): +def process_topic_place_change(topic_obj, **kwargs): keys = ["birthPlace", "deathPlace"] for key in keys: - if key in data.keys(): # only change property value if key is in data, otherwise it indicates no change - new_val = data[key] + if key in kwargs.keys(): # only change property value if key is in data, otherwise it indicates no change + new_val = kwargs[key] if new_val != '': he_key = get_he_key(key) - he_new_val = data.get(he_key, '') + he_new_val = kwargs.get(he_key, '') place = Place.create_new_place(en=new_val, he=he_new_val) topic_obj.properties[key] = {"value": place.primary_name('en'), 'dataSource': 'learning-team-editing-tool'} else: diff --git a/sefaria/model/schema.py b/sefaria/model/schema.py index 305dda01b8..204e0acbac 100644 --- 
a/sefaria/model/schema.py +++ b/sefaria/model/schema.py @@ -125,6 +125,9 @@ def secondary_titles(self, lang=None): return [t for t in self.all_titles(lang) if t != self.primary_title(lang)] def remove_title(self, text, lang): + is_primary = [t for t in self.titles if not (t["lang"] == lang and t["text"] == text and t.get('primary'))] + if is_primary: + self._primary_title[lang] = None self.titles = [t for t in self.titles if not (t["lang"] == lang and t["text"] == text)] return self From 7b32327ed702114d7b6efbd66bdeea928a1cf028 Mon Sep 17 00:00:00 2001 From: stevekaplan123 Date: Tue, 7 Nov 2023 11:24:48 +0200 Subject: [PATCH 07/52] chore: cleanup is_primary semantics in remove_title --- sefaria/model/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sefaria/model/schema.py b/sefaria/model/schema.py index 204e0acbac..10da95f291 100644 --- a/sefaria/model/schema.py +++ b/sefaria/model/schema.py @@ -125,7 +125,7 @@ def secondary_titles(self, lang=None): return [t for t in self.all_titles(lang) if t != self.primary_title(lang)] def remove_title(self, text, lang): - is_primary = [t for t in self.titles if not (t["lang"] == lang and t["text"] == text and t.get('primary'))] + is_primary = len([t for t in self.titles if not (t["lang"] == lang and t["text"] == text and t.get('primary'))]) if is_primary: self._primary_title[lang] = None self.titles = [t for t in self.titles if not (t["lang"] == lang and t["text"] == text)] From e69470a3cb70c2c1d1e62b8392cfcf191fca2916 Mon Sep 17 00:00:00 2001 From: saengel Date: Tue, 7 Nov 2023 12:52:53 +0200 Subject: [PATCH 08/52] chore(Backend topic images): Add an image-specific exception --- sefaria/system/exceptions.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sefaria/system/exceptions.py b/sefaria/system/exceptions.py index ff64c89336..b52533363f 100644 --- a/sefaria/system/exceptions.py +++ b/sefaria/system/exceptions.py @@ -58,3 +58,7 @@ class ManuscriptError(Exception): class MissingKeyError(Exception): pass + +class ExternalImageError(Exception): + """Thrown when an image is added to the database that is not hosted in our GCP bucket""" + pass From 6e33b7f8374c428aaa1de0bfb8cd9c5b9c8230f8 Mon Sep 17 00:00:00 2001 From: saengel Date: Tue, 7 Nov 2023 12:53:39 +0200 Subject: [PATCH 09/52] feat(Backend topic images): Add image URI validation --- sefaria/model/topic.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/sefaria/model/topic.py b/sefaria/model/topic.py index dbc865f83b..d06111ac02 100644 --- a/sefaria/model/topic.py +++ b/sefaria/model/topic.py @@ -3,7 +3,7 @@ from .schema import AbstractTitledObject, TitleGroup from .text import Ref, IndexSet, AbstractTextRecord from .category import Category -from sefaria.system.exceptions import InputError, DuplicateRecordError +from sefaria.system.exceptions import InputError, DuplicateRecordError, ExternalImageError from sefaria.model.timeperiod import TimePeriod from sefaria.system.database import db import structlog, bleach @@ -69,6 +69,11 @@ def _validate(self): super(Topic, self)._validate() if getattr(self, 'subclass', False): assert self.subclass in self.subclass_map, f"Field `subclass` set to {self.subclass} which is not one of the valid subclass keys in `Topic.subclass_map`. Valid keys are {', '.join(self.subclass_map.keys())}" + if getattr(self, 'image', False): + try: + self.image.index("https://storage.googleapis.com/img.sefaria.org/topics/") + except ExternalImageError: + print("The image is not stored properly. 
Topic should be stored in the image GCP bucket, in the topics subdirectory.") def _normalize(self): super()._normalize() From 5f2d045a445894e0cd1e6139659fa51e78424691 Mon Sep 17 00:00:00 2001 From: saengel Date: Tue, 7 Nov 2023 13:00:59 +0200 Subject: [PATCH 10/52] feat(Backend topic images): Generalize image with caption component, generalize css, rework TopicImage component --- static/css/s2.css | 12 ++++++------ static/js/Misc.jsx | 14 +++++++++++++- static/js/TopicPage.jsx | 6 ++---- 3 files changed, 21 insertions(+), 11 deletions(-) diff --git a/static/css/s2.css b/static/css/s2.css index 128071435d..95f55645ec 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -2289,7 +2289,7 @@ div.interfaceLinks-row a { font-family: "Taamey Frank", "adobe-garamond-pro", "Crimson Text", Georgia, "Times New Roman", serif; margin-bottom: -3px; } -.topicPhoto{ +.imageWithCaptionPhoto{ border: 1px solid #EDEDEC; max-width: 100%; height: auto; @@ -2297,13 +2297,13 @@ div.interfaceLinks-row a { top: 121px; left: 835px; } -.topicImageCaption .int-en { +.imageCaption .int-en { font-family: Roboto; } -.topicImageCaption .int-he { +.imageCaption .int-he { font-family: Roboto; } -.topicImageCaption { +.imageCaption { font-size: 12px; font-weight: 400; line-height: 15px; @@ -2316,7 +2316,7 @@ div.interfaceLinks-row a { padding-right: 44px; } @media (max-width: 600px) { - .topicPhoto{ + .imageWithCaptionPhoto{ height: auto; max-width: calc(66.67vw); max-height: calc(66.67vw); @@ -2331,7 +2331,7 @@ div.interfaceLinks-row a { justify-content: center; align-items: center; } - .topicImageCaption { + .imageCaption { font-size: 12px; font-weight: 400; line-height: 15px; diff --git a/static/js/Misc.jsx b/static/js/Misc.jsx index 7468ab72f4..e031c8f928 100644 --- a/static/js/Misc.jsx +++ b/static/js/Misc.jsx @@ -3296,6 +3296,17 @@ const Autocompleter = ({getSuggestions, showSuggestionsOnSelect, inputPlaceholde ) } +const ImageWithCaption = ({photoLink, enCaption, heCaption }) => { + + return ( +
+ +
+ +
+
); +} + export { CategoryHeader, SimpleInterfaceBlock, @@ -3360,5 +3371,6 @@ export { CategoryChooser, TitleVariants, requestWithCallBack, - OnInView + OnInView, + ImageWithCaption }; diff --git a/static/js/TopicPage.jsx b/static/js/TopicPage.jsx index a5bfb8df6f..d9765fa880 100644 --- a/static/js/TopicPage.jsx +++ b/static/js/TopicPage.jsx @@ -23,6 +23,7 @@ import { ToolTipped, SimpleLinkedBlock, CategoryHeader, + ImageWithCaption } from './Misc'; @@ -742,10 +743,7 @@ const TopicImage = ({photoLink, enCaption, heCaption }) => { return (
- -
- -
+
); } From c9c3527aecec0f88d0f86ef79a34d7739164614e Mon Sep 17 00:00:00 2001 From: saengel Date: Tue, 7 Nov 2023 13:06:34 +0200 Subject: [PATCH 11/52] feat(Backend topic images): Add helper function for data migration --- sefaria/helper/topic.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/sefaria/helper/topic.py b/sefaria/helper/topic.py index a18f15cea0..dd44952761 100644 --- a/sefaria/helper/topic.py +++ b/sefaria/helper/topic.py @@ -1266,4 +1266,23 @@ def delete_ref_topic_link(tref, to_topic, link_type, lang): link.delete() return {"status": "ok"} else: - return {"error": f"Cannot delete link between {tref} and {to_topic}."} \ No newline at end of file + return {"error": f"Cannot delete link between {tref} and {to_topic}."} + + +def add_image_to_topic(topic_slug, image_uri, en_caption, he_caption): + """ + A function to add an image to a Topic in the database. Helper for data migration. + This function queries the desired Topic, adds the image data, and then saves. + :param topic_slug String: A valid slug for a Topic + :param image_uri String: The URI of the image stored in the GCP images bucket, in the topics subdirectory. + NOTE: Incorrectly stored, or external images, will not pass validation for save + :param en_caption String: The English caption for a Topic image + :param he_caption String: The Hebrew caption for a Topic image + """ + topic = Topic().load({'slug': topic_slug}) + topic.image = {"image_uri": image_uri, + "image_caption": { + "en": en_caption, + "he": he_caption + }} + topic.save() \ No newline at end of file From 5241a678fc90614cbf5e8221ec19c1e5c7d68469 Mon Sep 17 00:00:00 2001 From: saengel Date: Tue, 7 Nov 2023 13:20:09 +0200 Subject: [PATCH 12/52] fix(Backend topic images): Adjust validate function --- sefaria/model/topic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sefaria/model/topic.py b/sefaria/model/topic.py index d06111ac02..a7e3dfbeec 100644 --- a/sefaria/model/topic.py +++ b/sefaria/model/topic.py @@ -71,7 +71,7 @@ def _validate(self): assert self.subclass in self.subclass_map, f"Field `subclass` set to {self.subclass} which is not one of the valid subclass keys in `Topic.subclass_map`. Valid keys are {', '.join(self.subclass_map.keys())}" if getattr(self, 'image', False): try: - self.image.index("https://storage.googleapis.com/img.sefaria.org/topics/") + self.image["image_uri"].index("https://storage.googleapis.com/img.sefaria.org/topics/") except ExternalImageError: print("The image is not stored properly. Topic should be stored in the image GCP bucket, in the topics subdirectory.") From 0d0936c50013b72b4e65dd61f2b0b1b13e254d89 Mon Sep 17 00:00:00 2001 From: saengel Date: Tue, 7 Nov 2023 13:22:42 +0200 Subject: [PATCH 13/52] chore(Backend topic images): Remove hardcoded map --- static/js/TopicPage.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/js/TopicPage.jsx b/static/js/TopicPage.jsx index d9765fa880..70b19a9c6f 100644 --- a/static/js/TopicPage.jsx +++ b/static/js/TopicPage.jsx @@ -744,7 +744,7 @@ const TopicImage = ({photoLink, enCaption, heCaption }) => { return (
-
); + ); } From 83d857a5e042721f2aa3cdaaf77f9142b4a3a572 Mon Sep 17 00:00:00 2001 From: saengel Date: Tue, 7 Nov 2023 13:22:56 +0200 Subject: [PATCH 14/52] chore(Backend topic images): Remove hardcoded map of data --- static/js/TopicPage.jsx | 28 ---------------------------- 1 file changed, 28 deletions(-) diff --git a/static/js/TopicPage.jsx b/static/js/TopicPage.jsx index 70b19a9c6f..00b6e54a15 100644 --- a/static/js/TopicPage.jsx +++ b/static/js/TopicPage.jsx @@ -188,34 +188,6 @@ const sheetRenderWrapper = (toggleSignUpModal) => item => ( ); -const hardcodedTopicImagesMap = { - 'Rosh Hashanah': {'photoLink':'https://museums.cjh.org/web/objects/common/webmedia.php?irn=11469&reftable=ecatalogue&refirn=6640', - 'enCaption':'Rosh Hashanah, Arthur Szyk (1894-1951) Tempera and ink on paper. New Canaan, 1948. Collection of Yeshiva University Museum. Gift of Charles Frost', - 'heCaption': 'ראש השנה, ארתור שיק, ארה״ב 1948. אוסף ישיבה יוניברסיטי'}, - - 'Yom Kippur': {'photoLink':'https://www.bl.uk/IllImages/BLCD/big/K900/K90075-77.jpg', - 'enCaption':'Micrography of Jonah being swallowed by the fish. Germany, 1300-1500, The British Library', - 'heCaption': 'מיקרוגרפיה של יונה בבטן הדג, מתוך ספר יונה ההפטרה של יום כיפור, 1300-1500'}, - - 'The Four Species': {'photoLink':'https://res.cloudinary.com/the-jewish-museum/image/fetch/q_auto,f_auto/v1/https%3A%2F%2Fthejm.netx.net%2Ffile%2Fasset%2F34234%2Fview%2F52568%2Fview_52568%3Ftoken%3D5d5cdc57-6399-40b5-afb0-93139921700e', - 'enCaption':'Etrog container, K B, late 19th century, Germany. The Jewish Museum, Gift of Dr. Harry G. Friedman', - 'heCaption': 'תיבת אתרוג, סוף המאה ה19, גרמניה. המוזיאון היהודי בניו יורק, מתנת דר. הארי ג. פרידמן '}, - - 'Sukkot': {'photoLink':'https://www.bl.uk/IllImages/BLCD/big/d400/d40054-17a.jpg', - 'enCaption':'Detail of a painting of a sukkah. Image taken from f. 316v of Forli Siddur. 1383, Italian rite. The British Library', - 'heCaption': 'פרט ציור של סוכה עם שולחן פרוש ושלוש דמויות. דימוי מתוך סידור פורלי, 1383 איטליה'}, - - 'Simchat Torah': {'photoLink':'https://upload.wikimedia.org/wikipedia/commons/4/4d/Rosh_Hashanah_greeting_card_%287974345646%29.jpg?20150712114334', - 'enCaption':'Rosh Hashanah postcard: Hakafot, Haim Yisroel Goldberg (1888-1943) Publisher: Williamsburg Post Card Co. Germany, ca. 1915 Collection of Yeshiva University Museum', - 'heCaption': 'גלויה לראש השנה: הקפות, חיים גולדברג, גרמניה 1915, אוסף ישיבה יוניברסיטי'}, - - 'Shabbat': {'photoLink':'https://res.cloudinary.com/the-jewish-museum/image/fetch/q_auto,f_auto/v1/https%3A%2F%2Fthejm.netx.net%2Ffile%2Fasset%2F35064%2Fview%2F61838%2Fview_61838%3Ftoken%3D5d5cdc57-6399-40b5-afb0-93139921700e', - 'enCaption':'Friday Evening, Isidor Kaufmann, Austria c. 1920. The Jewish Museum, Gift of Mr. and Mrs. M. R. Schweitzer', - 'heCaption': 'שישי בערב, איזידור קאופמן, וינה 1920. המוזיאון היהודי בניו יורק, מתנת מר וגברת מ.ר. 
שוויצר'}, - -}; - - /* *** Components */ From fdfc6039dc729827091e5a7f26951d0f3a34c3f1 Mon Sep 17 00:00:00 2001 From: stevekaplan123 Date: Wed, 8 Nov 2023 09:48:28 +0200 Subject: [PATCH 15/52] fix(Title Group): remove 'not' in remove_title --- sefaria/helper/tests/topic_test.py | 1 + sefaria/helper/topic.py | 6 +++--- sefaria/model/schema.py | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/sefaria/helper/tests/topic_test.py b/sefaria/helper/tests/topic_test.py index 526bc88f5d..242539b5dc 100644 --- a/sefaria/helper/tests/topic_test.py +++ b/sefaria/helper/tests/topic_test.py @@ -99,6 +99,7 @@ def test_author_root(author_root, actual_author): update_topic(actual_author["topic"], **new_values) assert Place().load({'key': new_values["birthPlace"]}) assert actual_author["topic"].properties["birthYear"]["value"] == 1300 + Place().load({'key': new_values["birthPlace"]}).delete() def test_change_categories_and_titles(author_root, root_with_self_link): # tests moving both root categories down the tree and back up and asserting that moving down the tree changes the tree diff --git a/sefaria/helper/topic.py b/sefaria/helper/topic.py index 837c34b4a5..30d34725a1 100644 --- a/sefaria/helper/topic.py +++ b/sefaria/helper/topic.py @@ -1042,8 +1042,8 @@ def topic_change_category(topic_obj, new_category, old_category="", rebuild=Fals rebuild_topic_toc(topic_obj, category_changed=True) return topic_obj -def update_topic_titles(topic, **kwargs): - new_primary = {"en": kwargs['title'], "he": kwargs["heTitle"]} +def update_topic_titles(topic, title="", heTitle="", **kwargs): + new_primary = {"en": title, "he": heTitle} for lang in ['en', 'he']: # first add new primary titles then remove old alt titles and add new alt titles for title in topic.get_titles(lang): topic.remove_title(title, lang) @@ -1059,7 +1059,7 @@ def update_authors_place_and_time(topic, dataSource='learning-team-editing-tool' if not hasattr(topic, 'properties'): topic.properties = {} process_topic_place_change(topic, **kwargs) - return update_author_era(topic, **kwargs, dataSource=dataSource) + return update_author_era(topic, dataSource=dataSource, **kwargs) def update_properties(topic_obj, dataSource, k, v): if v == '': diff --git a/sefaria/model/schema.py b/sefaria/model/schema.py index bd107dba02..568f0b89a5 100644 --- a/sefaria/model/schema.py +++ b/sefaria/model/schema.py @@ -125,7 +125,7 @@ def secondary_titles(self, lang=None): return [t for t in self.all_titles(lang) if t != self.primary_title(lang)] def remove_title(self, text, lang): - is_primary = len([t for t in self.titles if not (t["lang"] == lang and t["text"] == text and t.get('primary'))]) + is_primary = len([t for t in self.titles if (t["lang"] == lang and t["text"] == text and t.get('primary'))]) if is_primary: self._primary_title[lang] = None self.titles = [t for t in self.titles if not (t["lang"] == lang and t["text"] == text)] From 9075f4d2109149efa47c3bc1828cd6e1efec3b19 Mon Sep 17 00:00:00 2001 From: stevekaplan123 Date: Wed, 8 Nov 2023 09:52:37 +0200 Subject: [PATCH 16/52] chore: fix comment --- sefaria/helper/topic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sefaria/helper/topic.py b/sefaria/helper/topic.py index 30d34725a1..4f5982728c 100644 --- a/sefaria/helper/topic.py +++ b/sefaria/helper/topic.py @@ -1044,7 +1044,7 @@ def topic_change_category(topic_obj, new_category, old_category="", rebuild=Fals def update_topic_titles(topic, title="", heTitle="", **kwargs): new_primary = {"en": title, "he": heTitle} - for lang in 
['en', 'he']: # first add new primary titles then remove old alt titles and add new alt titles + for lang in ['en', 'he']: # first remove all titles and add new primary and then alt titles for title in topic.get_titles(lang): topic.remove_title(title, lang) topic.add_title(new_primary[lang], lang, True, False) From 948f5038884bee8d0a841e60b7f23cff390ba140 Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 8 Nov 2023 12:35:59 +0200 Subject: [PATCH 17/52] chore(Backend topic images): Retrieve data for images from API call --- static/js/TopicPage.jsx | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/static/js/TopicPage.jsx b/static/js/TopicPage.jsx index 00b6e54a15..21f4d1ca66 100644 --- a/static/js/TopicPage.jsx +++ b/static/js/TopicPage.jsx @@ -194,7 +194,6 @@ const sheetRenderWrapper = (toggleSignUpModal) => item => ( - const TopicCategory = ({topic, topicTitle, setTopic, setNavTopic, compare, initialWidth, openDisplaySettings, openSearch}) => { const [topicData, setTopicData] = useState(Sefaria.getTopicFromCache(topic) || {primaryTitle: topicTitle}); @@ -312,12 +311,12 @@ const TopicSponsorship = ({topic_slug}) => { ); } -const TopicHeader = ({ topic, topicData, topicTitle, multiPanel, isCat, setNavTopic, openDisplaySettings, openSearch }) => { +const TopicHeader = ({ topic, topicData, topicTitle, multiPanel, isCat, setNavTopic, openDisplaySettings, openSearch, topicImage }) => { const { en, he } = !!topicData && topicData.primaryTitle ? topicData.primaryTitle : {en: "Loading...", he: "טוען..."}; const isTransliteration = !!topicData ? topicData.primaryTitleIsTransliteration : {en: false, he: false}; const category = !!topicData ? Sefaria.topicTocCategory(topicData.slug) : null; - const topicImageKey = topicTitle.en; - const tpTopImg = topicImageKey in hardcodedTopicImagesMap && !multiPanel ? : null; + + const tpTopImg = !multiPanel && topicImage ? : null; return (
@@ -405,6 +404,7 @@ const TopicPage = ({ const [parashaData, setParashaData] = useState(null); const [showFilterHeader, setShowFilterHeader] = useState(false); const tabDisplayData = useTabDisplayData(translationLanguagePreference, versionPref); + const topicImage = topicData.image; const scrollableElement = useRef(); const clearAndSetTopic = (topic, topicTitle) => {setTopic(topic, topicTitle)}; @@ -471,11 +471,12 @@ const TopicPage = ({ onClickFilterIndex = displayTabs.length - 1; } const classStr = classNames({topicPanel: 1, readerNavMenu: 1}); + return
- + {(!topicData.isLoading && displayTabs.length) ? {!topicData.isLoading && } @@ -606,7 +608,7 @@ TopicLink.propTypes = { }; -const TopicSideColumn = ({ slug, links, clearAndSetTopic, parashaData, tref, setNavTopic, timePeriod, properties, topicTitle, multiPanel }) => { +const TopicSideColumn = ({ slug, links, clearAndSetTopic, parashaData, tref, setNavTopic, timePeriod, properties, topicTitle, multiPanel, topicImage }) => { const category = Sefaria.topicTocCategory(slug); const linkTypeArray = links ? Object.values(links).filter(linkType => !!linkType && linkType.shouldDisplay && linkType.links.filter(l => l.shouldDisplay !== false).length > 0) : []; if (linkTypeArray.length === 0) { @@ -626,7 +628,7 @@ const TopicSideColumn = ({ slug, links, clearAndSetTopic, parashaData, tref, set const readingsComponent = hasReadings ? ( ) : null; - const topicMetaData = ; + const topicMetaData = ; const linksComponent = ( links ? linkTypeArray.sort((a, b) => { @@ -776,7 +778,7 @@ const propKeys = [ ]; -const TopicMetaData = ({ topicTitle, timePeriod, multiPanel, properties={} }) => { +const TopicMetaData = ({ topicTitle, timePeriod, multiPanel, topicImage, properties={} }) => { const tpSection = !!timePeriod ? (
@@ -814,8 +816,8 @@ const TopicMetaData = ({ topicTitle, timePeriod, multiPanel, properties={} }) => }
) : null; - const topicImageKey = topicTitle.en; - const tpSidebarImg = topicImageKey in hardcodedTopicImagesMap && multiPanel ? : null; + + const tpSidebarImg = multiPanel && topicImage ? : null; return ( <> {tpSidebarImg} From 122776f5d3947252e0d4def658b87e09ea1407e2 Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 8 Nov 2023 20:47:59 +0200 Subject: [PATCH 18/52] chore(Backend topic images): Add data migration script --- scripts/topic_data_migration.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 scripts/topic_data_migration.py diff --git a/scripts/topic_data_migration.py b/scripts/topic_data_migration.py new file mode 100644 index 0000000000..e69de29bb2 From 23e04d2316a7edf7b6b5453ca6118763fd3215a1 Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 8 Nov 2023 20:48:36 +0200 Subject: [PATCH 19/52] chore(Backend topic images): Enhanced data migration script --- scripts/topic_data_migration.py | 43 +++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/scripts/topic_data_migration.py b/scripts/topic_data_migration.py index e69de29bb2..fac1c31ce3 100644 --- a/scripts/topic_data_migration.py +++ b/scripts/topic_data_migration.py @@ -0,0 +1,43 @@ +import django + +django.setup() + +from sefaria.model import * + +from sefaria.helper.topic import add_image_to_topic + +## Adding images + +hardcodedTopicImagesMap = { + 'rosh-hashanah': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/rosh-hashanah.jpeg', + 'enCaption': 'Rosh Hashanah, Arthur Szyk (1894-1951) Tempera and ink on paper. New Canaan, 1948. Collection of Yeshiva University Museum. Gift of Charles Frost', + 'heCaption': 'ראש השנה, ארתור שיק, ארה״ב 1948. אוסף ישיבה יוניברסיטי'}, + + 'yom-kippur': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/yom-kippur.jpeg', + 'enCaption': 'Micrography of Jonah being swallowed by the fish. Germany, 1300-1500, The British Library', + 'heCaption': 'מיקרוגרפיה של יונה בבטן הדג, מתוך ספר יונה ההפטרה של יום כיפור, 1300-1500'}, + + 'the-four-species': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/the-four-species.jpg', + 'enCaption': 'Etrog container, K B, late 19th century, Germany. The Jewish Museum, Gift of Dr. Harry G. Friedman', + 'heCaption': 'תיבת אתרוג, סוף המאה ה19, גרמניה. המוזיאון היהודי בניו יורק, מתנת דר. הארי ג. פרידמן '}, + + 'sukkot': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/sukkot.jpg', + 'enCaption': 'Detail of a painting of a sukkah. Image taken from f. 316v of Forli Siddur. 1383, Italian rite. The British Library', + 'heCaption': 'פרט ציור של סוכה עם שולחן פרוש ושלוש דמויות. דימוי מתוך סידור פורלי, 1383 איטליה'}, + + 'simchat-torah': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/simchat-torah.jpg', + 'enCaption': 'Rosh Hashanah postcard: Hakafot, Haim Yisroel Goldberg (1888-1943) Publisher: Williamsburg Post Card Co. Germany, ca. 1915 Collection of Yeshiva University Museum', + 'heCaption': 'גלויה לראש השנה: הקפות, חיים גולדברג, גרמניה 1915, אוסף ישיבה יוניברסיטי'}, + + 'shabbat': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/shabbat.jpg', + 'enCaption': 'Friday Evening, Isidor Kaufmann, Austria c. 1920. The Jewish Museum, Gift of Mr. and Mrs. M. R. Schweitzer', + 'heCaption': 'שישי בערב, איזידור קאופמן, וינה 1920. המוזיאון היהודי בניו יורק, מתנת מר וגברת מ.ר. 
שוויצר'}, + +} + +for topic in hardcodedTopicImagesMap: + add_image_to_topic(topic, + image_uri=hardcodedTopicImagesMap[topic]["image_uri"], + en_caption=hardcodedTopicImagesMap[topic]["enCaption"], + he_caption=hardcodedTopicImagesMap[topic]["heCaption"]) + From 621a9475273bc6268e60493570b29f6de56bb032 Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 8 Nov 2023 20:49:04 +0200 Subject: [PATCH 20/52] fix(Backend topic images): Adjust the caption props as per feedback in code review --- static/js/Misc.jsx | 4 ++-- static/js/TopicPage.jsx | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/static/js/Misc.jsx b/static/js/Misc.jsx index a2e6dfe310..d095b833fd 100644 --- a/static/js/Misc.jsx +++ b/static/js/Misc.jsx @@ -3300,13 +3300,13 @@ const Autocompleter = ({getSuggestions, showSuggestionsOnSelect, inputPlaceholde ) } -const ImageWithCaption = ({photoLink, enCaption, heCaption }) => { +const ImageWithCaption = ({photoLink, caption }) => { return (
- +
); } diff --git a/static/js/TopicPage.jsx b/static/js/TopicPage.jsx index 21f4d1ca66..dc73f4064e 100644 --- a/static/js/TopicPage.jsx +++ b/static/js/TopicPage.jsx @@ -316,7 +316,7 @@ const TopicHeader = ({ topic, topicData, topicTitle, multiPanel, isCat, setNavTo const isTransliteration = !!topicData ? topicData.primaryTitleIsTransliteration : {en: false, he: false}; const category = !!topicData ? Sefaria.topicTocCategory(topicData.slug) : null; - const tpTopImg = !multiPanel && topicImage ? : null; + const tpTopImg = !multiPanel && topicImage ? : null; return (
@@ -713,11 +713,11 @@ const TopicSideSection = ({ title, children, hasMore }) => { ); } -const TopicImage = ({photoLink, enCaption, heCaption }) => { +const TopicImage = ({photoLink, caption }) => { return (
- +
); } @@ -817,7 +817,7 @@ const TopicMetaData = ({ topicTitle, timePeriod, multiPanel, topicImage, propert ) : null; - const tpSidebarImg = multiPanel && topicImage ? : null; + const tpSidebarImg = multiPanel && topicImage ? : null; return ( <> {tpSidebarImg} From 0aeb6e644b80dd7d570ded3205ca041b3bac5ce1 Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Thu, 9 Nov 2023 13:37:03 +0200 Subject: [PATCH 21/52] fix(css): polish reader control icons --- static/css/s2.css | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/static/css/s2.css b/static/css/s2.css index b5a4d0f3db..3d25a53d0d 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -4511,6 +4511,7 @@ body .ui-autocomplete.dictionary-toc-autocomplete .ui-menu-item a.ui-state-focus } .readerControls.transLangPrefSuggBann { background-color: #EDEDEC; + z-index: 99; } .readerControls .readerControlsInner.transLangPrefSuggBannInner { justify-content: center; @@ -4706,11 +4707,11 @@ body .ui-autocomplete.dictionary-toc-autocomplete .ui-menu-item a.ui-state-focus .readerOptions .int-en { font-size: 25px; line-height: 63px; - margin-right: 8px; + margin-right: 5px; } .readerOptions .int-he { - font-size: 28px; - line-height: 67px; + font-size: 32px; + line-height: 65px; margin-left: 6px; } .rightButtons .readerOptions { From 28e4ff0400e44cd4a2600fe1d8ec040ac95fb879 Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Thu, 9 Nov 2023 15:48:53 +0200 Subject: [PATCH 22/52] fix(css): remove margin from English reader icon --- static/css/s2.css | 1 - 1 file changed, 1 deletion(-) diff --git a/static/css/s2.css b/static/css/s2.css index 3d25a53d0d..3586129f39 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -4707,7 +4707,6 @@ body .ui-autocomplete.dictionary-toc-autocomplete .ui-menu-item a.ui-state-focus .readerOptions .int-en { font-size: 25px; line-height: 63px; - margin-right: 5px; } .readerOptions .int-he { font-size: 32px; From c6a3343e07ac6cc1c910d38adf0b34d38f0a27ff Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Sun, 12 Nov 2023 14:27:52 +0200 Subject: [PATCH 23/52] chore(DisplaySettingsButton): changed lang icon to be svg not font --- static/css/s2.css | 7 ++----- static/img/lang_icon_english.svg | 3 +++ static/img/lang_icon_hebrew.svg | 3 +++ static/js/Misc.jsx | 4 ++-- 4 files changed, 10 insertions(+), 7 deletions(-) create mode 100644 static/img/lang_icon_english.svg create mode 100644 static/img/lang_icon_hebrew.svg diff --git a/static/css/s2.css b/static/css/s2.css index 3af0a4047b..62251e0a49 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -4716,13 +4716,10 @@ body .ui-autocomplete.dictionary-toc-autocomplete .ui-menu-item a.ui-state-focus cursor: pointer; } .readerOptions .int-en { - font-size: 25px; - line-height: 63px; + margin-right: 4px; } .readerOptions .int-he { - font-size: 32px; - line-height: 65px; - margin-left: 6px; + margin-left: 8px; } .rightButtons .readerOptions { vertical-align: middle; diff --git a/static/img/lang_icon_english.svg b/static/img/lang_icon_english.svg new file mode 100644 index 0000000000..c30df8b811 --- /dev/null +++ b/static/img/lang_icon_english.svg @@ -0,0 +1,3 @@ + + + diff --git a/static/img/lang_icon_hebrew.svg b/static/img/lang_icon_hebrew.svg new file mode 100644 index 0000000000..a5a410036d --- /dev/null +++ b/static/img/lang_icon_hebrew.svg @@ -0,0 +1,3 @@ + + + diff --git a/static/js/Misc.jsx b/static/js/Misc.jsx index 8953fe16b4..5b5f87a287 100644 --- a/static/js/Misc.jsx +++ b/static/js/Misc.jsx @@ -1348,8 +1348,8 @@ class DisplaySettingsButton extends 
Component { if (Sefaria._siteSettings.TORAH_SPECIFIC) { icon = - A - א + Toggle Reader Menu Display Settings + Toggle Reader Menu Display Settings ; } else { icon = Aa; From 83aff63b794fe7a57472c34157dbe558b578ad3f Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Mon, 13 Nov 2023 16:51:27 +0200 Subject: [PATCH 24/52] chore(Reader Controls): changed color of bookmark icon in svg, enlarged Hebrew Aleph icon --- static/css/s2.css | 1 - static/icons/bookmark-filled.svg | 2 +- static/icons/bookmark.svg | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/static/css/s2.css b/static/css/s2.css index 62251e0a49..0d01e8eb44 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -4665,7 +4665,6 @@ body .ui-autocomplete.dictionary-toc-autocomplete .ui-menu-item a.ui-state-focus height: 18px; width: 18px; margin-top: 3px; - filter: grayscale(100%) brightness(80%) sepia(20%); } .rightButtons .saveButton.tooltip-toggle::before { top: 47px; diff --git a/static/icons/bookmark-filled.svg b/static/icons/bookmark-filled.svg index b3c5cabfef..bb38f47693 100644 --- a/static/icons/bookmark-filled.svg +++ b/static/icons/bookmark-filled.svg @@ -1,3 +1,3 @@ - + diff --git a/static/icons/bookmark.svg b/static/icons/bookmark.svg index 3638245685..84eaf9cf14 100644 --- a/static/icons/bookmark.svg +++ b/static/icons/bookmark.svg @@ -1,3 +1,3 @@ - + From 72de0feb1a5ea98af9fde03f806decc9d224e30b Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Mon, 13 Nov 2023 18:02:21 +0200 Subject: [PATCH 25/52] chore(Reader Controls): enlarge Hebrew Aleph only --- static/css/s2.css | 3 +++ 1 file changed, 3 insertions(+) diff --git a/static/css/s2.css b/static/css/s2.css index 0d01e8eb44..9404feac7f 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -4720,6 +4720,9 @@ body .ui-autocomplete.dictionary-toc-autocomplete .ui-menu-item a.ui-state-focus .readerOptions .int-he { margin-left: 8px; } +.readerOptions .int-he img { + height: 18px; +} .rightButtons .readerOptions { vertical-align: middle; } From a5ef1ebc5b2f39e9336e42cfe565947b6a2fd821 Mon Sep 17 00:00:00 2001 From: Brendan Galloway Date: Tue, 14 Nov 2023 13:23:12 +0200 Subject: [PATCH 26/52] ci: remove filters on workflow triggers --- .github/workflows/continuous.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/continuous.yaml b/.github/workflows/continuous.yaml index 3fa4611d49..eeebf4fdc4 100644 --- a/.github/workflows/continuous.yaml +++ b/.github/workflows/continuous.yaml @@ -1,11 +1,7 @@ name: Continuous on: push: - branches: - - "*" pull_request: - branches: - - "*" concurrency: group: ${{ github.ref }} From 7bb7cfb10c9b2350bc5a857d3505eea40bd7b773 Mon Sep 17 00:00:00 2001 From: stevekaplan123 Date: Wed, 15 Nov 2023 10:59:37 +0200 Subject: [PATCH 27/52] fix(Search): josephus doesnt freeze search --- static/js/Header.jsx | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/static/js/Header.jsx b/static/js/Header.jsx index 5f77a7fc69..d48d90a468 100644 --- a/static/js/Header.jsx +++ b/static/js/Header.jsx @@ -307,8 +307,11 @@ class SearchBar extends Component { .then(d => { // If the query isn't recognized as a ref, but only for reasons of capitalization. Resubmit with recognizable caps. 
if (Sefaria.isACaseVariant(query, d)) { - this.submitSearch(Sefaria.repairCaseVariant(query, d)); - return; + const repairedQuery = Sefaria.repairCaseVariant(query, d); + if (repairedQuery !== query) { + this.submitSearch(repairedQuery); + return; + } } const repairedQuery = Sefaria.repairGershayimVariant(query, d); if (repairedQuery !== query) { @@ -329,7 +332,8 @@ class SearchBar extends Component { this.props.openTopic(d["topic_slug"]); this.props.onNavigate && this.props.onNavigate(); - } else if (d["type"] === "Person" || d["type"] === "Collection" || d["type"] === "TocCategory") { + } else if (d["type"] === "Person" || d["type"] === "Topic" || d["type"] === "AuthorTopic" + || d["type"] === "Collection" || d["type"] === "TocCategory") { this.redirectToObject(d["type"], d["key"]); } else { From c662738a3d2b362a7fc3cef292fd9120fa29cbf5 Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Wed, 15 Nov 2023 11:15:46 +0200 Subject: [PATCH 28/52] chore(Reader Controls): changed verse numbers to #000 --- static/css/s2.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/css/s2.css b/static/css/s2.css index 9404feac7f..d0b3e7d367 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -5431,7 +5431,7 @@ But not to use a display block directive that might break continuous mode for ot } .segment .segmentNumber, .textRagnge .numberLabel { - color: #666; + color: #000; top: 0; } .dark .segment .segmentNumber, From 183c7cc556d619ebdfab57937658ce62169e743e Mon Sep 17 00:00:00 2001 From: stevekaplan123 Date: Wed, 15 Nov 2023 14:58:14 +0200 Subject: [PATCH 29/52] fix(Search): repairCaseVariant for non-refs as well as refs --- static/js/Header.jsx | 4 +--- static/js/sefaria/sefaria.js | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/static/js/Header.jsx b/static/js/Header.jsx index d48d90a468..003f10b654 100644 --- a/static/js/Header.jsx +++ b/static/js/Header.jsx @@ -332,10 +332,8 @@ class SearchBar extends Component { this.props.openTopic(d["topic_slug"]); this.props.onNavigate && this.props.onNavigate(); - } else if (d["type"] === "Person" || d["type"] === "Topic" || d["type"] === "AuthorTopic" - || d["type"] === "Collection" || d["type"] === "TocCategory") { + } else if (d["type"] === "Person" || d["type"] === "Collection" || d["type"] === "TocCategory") { this.redirectToObject(d["type"], d["key"]); - } else { Sefaria.track.event("Search", "Search Box Search", query); this.closeSearchAutocomplete(); diff --git a/static/js/sefaria/sefaria.js b/static/js/sefaria/sefaria.js index 6941c749b5..db8f4eef29 100644 --- a/static/js/sefaria/sefaria.js +++ b/static/js/sefaria/sefaria.js @@ -2002,7 +2002,7 @@ _media: {}, }, repairCaseVariant: function(query, data) { // Used when isACaseVariant() is true to prepare the alternative - const completionArray = data["completion_objects"].filter(x => x.type === 'ref').map(x => x.title); + const completionArray = data["completion_objects"].map(x => x.title); let normalizedQuery = query.toLowerCase(); let bestMatch = ""; let bestMatchLength = 0; From 9fecaa4e37059f8ce991c102e7238c2574c6bb05 Mon Sep 17 00:00:00 2001 From: stevekaplan123 Date: Thu, 16 Nov 2023 09:07:12 +0200 Subject: [PATCH 30/52] chore: repairCaseVariant calls isACaseVariant --- static/js/Header.jsx | 10 ++++------ static/js/sefaria/sefaria.js | 30 ++++++++++++++++-------------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/static/js/Header.jsx b/static/js/Header.jsx index 003f10b654..7a16a8e3d1 100644 --- a/static/js/Header.jsx +++ 
b/static/js/Header.jsx @@ -306,12 +306,10 @@ class SearchBar extends Component { Sefaria.getName(query) .then(d => { // If the query isn't recognized as a ref, but only for reasons of capitalization. Resubmit with recognizable caps. - if (Sefaria.isACaseVariant(query, d)) { - const repairedQuery = Sefaria.repairCaseVariant(query, d); - if (repairedQuery !== query) { - this.submitSearch(repairedQuery); - return; - } + const repairedCaseVariant = Sefaria.repairCaseVariant(query, d); + if (repairedCaseVariant !== query) { + this.submitSearch(repairedCaseVariant); + return; } const repairedQuery = Sefaria.repairGershayimVariant(query, d); if (repairedQuery !== query) { diff --git a/static/js/sefaria/sefaria.js b/static/js/sefaria/sefaria.js index db8f4eef29..3da5849570 100644 --- a/static/js/sefaria/sefaria.js +++ b/static/js/sefaria/sefaria.js @@ -2001,20 +2001,22 @@ _media: {}, data["completions"][0] != query.slice(0, data["completions"][0].length)) }, repairCaseVariant: function(query, data) { - // Used when isACaseVariant() is true to prepare the alternative - const completionArray = data["completion_objects"].map(x => x.title); - let normalizedQuery = query.toLowerCase(); - let bestMatch = ""; - let bestMatchLength = 0; - - completionArray.forEach((completion) => { - let normalizedCompletion = completion.toLowerCase(); - if (normalizedQuery.includes(normalizedCompletion) && normalizedCompletion.length > bestMatchLength) { - bestMatch = completion; - bestMatchLength = completion.length; - } - }); - return bestMatch + query.slice(bestMatch.length); + if (Sefaria.isACaseVariant(query, data)) { + const completionArray = data["completion_objects"].map(x => x.title); + let normalizedQuery = query.toLowerCase(); + let bestMatch = ""; + let bestMatchLength = 0; + + completionArray.forEach((completion) => { + let normalizedCompletion = completion.toLowerCase(); + if (normalizedQuery.includes(normalizedCompletion) && normalizedCompletion.length > bestMatchLength) { + bestMatch = completion; + bestMatchLength = completion.length; + } + }); + return bestMatch + query.slice(bestMatch.length); + } + return query; }, repairGershayimVariant: function(query, data) { if (!data["is_ref"] && data.completions && !data.completions.includes(query)) { From 950eeafdcbf1600d6d9c33247b75c6aa209a7b73 Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Thu, 16 Nov 2023 16:10:07 +0200 Subject: [PATCH 31/52] chore(SearchAutocomplete): Ensure magnifying glass appears above search results --- static/js/Header.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/js/Header.jsx b/static/js/Header.jsx index 5f77a7fc69..de0ca4e2ee 100644 --- a/static/js/Header.jsx +++ b/static/js/Header.jsx @@ -234,7 +234,7 @@ class SearchBar extends Component { }); if (comps.length > 0) { const q = `${this._searchOverridePre}${request.term}${this._searchOverridePost}`; - response(comps.concat([{value: "SEARCH_OVERRIDE", label: q, type: "search"}])); + response([{value: "SEARCH_OVERRIDE", label: q, type: "search"}].concat(comps)); } else { response([]) } From 07c6d97edfb645863b9760942216670c299f8130 Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Thu, 16 Nov 2023 16:51:32 +0200 Subject: [PATCH 32/52] chore(SearchAutocomplete): adjusting magnifying glass border position to appear under it --- static/css/s2.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/css/s2.css b/static/css/s2.css index 510c84d512..a5665c4f28 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -713,7 +713,7 @@ 
div:has(#bannerMessage) + .readerApp.singlePanel .mobileNavMenu { color: #999; } .ui-autocomplete .ui-menu-item.search-override { - border-top: solid 1px #ccc; + border-bottom: solid 1px #ccc; padding-top: 12px; } .ui-autocomplete .ui-menu-item.hebrew-result a { From 97a452f68121d1fd9232bf71160b4d6a23ff6817 Mon Sep 17 00:00:00 2001 From: Skyler Cohen Date: Fri, 17 Nov 2023 12:27:42 -0500 Subject: [PATCH 33/52] static(jobs): Remove Communication Specialist job. Set the page to have no jobs --- templates/static/jobs.html | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/templates/static/jobs.html b/templates/static/jobs.html index 859ee3fa9e..0b341814f7 100644 --- a/templates/static/jobs.html +++ b/templates/static/jobs.html @@ -16,7 +16,7 @@

-
+
@@ -69,15 +69,6 @@
         HR and Operations
         -->
-
-
-
-
         Marketing and Communications
-
-
-
-
-
@@ -100,7 +91,7 @@
         Israel Team
--->
-
+
; }
+    const confirmDelete = () => {
+        if (confirm("Are you sure you want to delete?")) {
+            deleteObj();
+        }
+    }
     return
@@ -202,7 +207,7 @@ const AdminEditor = ({title, data, close, catMenu, updateData, savingStatus,
            })}
            {extras}
            {!isNew &&
Delete
} From 0ac17b699cc9d7670f41ba5a5b40ede3b00869bd Mon Sep 17 00:00:00 2001 From: saengel Date: Tue, 21 Nov 2023 15:52:16 +0200 Subject: [PATCH 39/52] chore(Backend topic images): Add Cerberus validation --- sefaria/model/topic.py | 196 +++++++++++++++++++++++++++-------------- 1 file changed, 131 insertions(+), 65 deletions(-) diff --git a/sefaria/model/topic.py b/sefaria/model/topic.py index ffb8c93620..81ca0d9663 100644 --- a/sefaria/model/topic.py +++ b/sefaria/model/topic.py @@ -4,6 +4,7 @@ from .text import Ref, IndexSet, AbstractTextRecord from .category import Category from sefaria.system.exceptions import InputError, DuplicateRecordError +from sefaria.system.validators import validate_url from sefaria.model.timeperiod import TimePeriod, LifePeriod from sefaria.model.portal import Portal from sefaria.system.database import db @@ -11,6 +12,7 @@ from sefaria.model.place import Place import regex as re from typing import Type + logger = structlog.get_logger(__name__) @@ -41,16 +43,42 @@ class Topic(abst.SluggedAbstractMongoRecord, AbstractTitledObject): 'numSources', 'shouldDisplay', 'parasha', # name of parsha as it appears in `parshiot` collection - 'ref', # dictionary for topics with refs associated with them (e.g. parashah) containing strings `en`, `he`, and `url`. + 'ref', + # dictionary for topics with refs associated with them (e.g. parashah) containing strings `en`, `he`, and `url`. 'good_to_promote', 'description_published', # bool to keep track of which descriptions we've vetted 'isAmbiguous', # True if topic primary title can refer to multiple other topics - "data_source", #any topic edited manually should display automatically in the TOC and this flag ensures this + "data_source", # any topic edited manually should display automatically in the TOC and this flag ensures this 'image', "portal_slug", # slug to relevant Portal object ] + attr_schemas = { + "image": { + "image_uri": { + "type": "string", + "required": True, + "regex": "^https://storage\.googleapis\.com/img\.sefaria\.org/topics/.*?" + }, + "image_caption": { + "type": "dict", + "required": True, + "schema": { + "en": { + "type": "string", + "required": True + }, + "he": { + "type": "string", + "required": True + } + } + } + } + } + ROOT = "Main Menu" # the root of topic TOC is not a topic, so this is a fake slug. we know it's fake because it's not in normal form - # this constant is helpful in the topic editor tool functions in this file + + # this constant is helpful in the topic editor tool functions in this file def load(self, query, proj=None): if self.__class__ != Topic: @@ -72,11 +100,9 @@ def _validate(self): super(Topic, self)._validate() if getattr(self, 'subclass', False): assert self.subclass in self.subclass_map, f"Field `subclass` set to {self.subclass} which is not one of the valid subclass keys in `Topic.subclass_map`. Valid keys are {', '.join(self.subclass_map.keys())}" - if getattr(self, 'image', False): - assert "https://storage.googleapis.com/img.sefaria.org/topics/" in self.image["image_uri"], "The image is not stored properly. Topic should be stored in the image GCP bucket, in the topics subdirectory." 
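# Illustrative sketch, not part of the patch: how the `attr_schemas["image"]`
# entry added above could be exercised with Cerberus, the library named in this
# commit's subject. The exact wiring inside Topic._validate isn't visible in
# this hunk, so the direct Validator call and the sample caption text below are
# assumptions for illustration only, and importing sefaria.model presumes a
# configured Django environment.
from cerberus import Validator
from sefaria.model.topic import Topic

candidate_image = {
    "image_uri": "https://storage.googleapis.com/img.sefaria.org/topics/sukkot.jpg",
    "image_caption": {"en": "Detail of a painting of a sukkah.", "he": "פרט ציור של סוכה"},
}
validator = Validator(Topic.attr_schemas["image"])
assert validator.validate(candidate_image), validator.errors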
- - if getattr(self, 'portal_slug', None): - Portal.validate_slug_exists(self.portal_slug) + if getattr(self, "image", False): + img_url = self.image.get("image_uri") + if img_url: validate_url(img_url) def _normalize(self): super()._normalize() @@ -136,7 +162,6 @@ def get_types(self, types=None, curr_path=None, search_slug_set=None): new_topic.get_types(types, new_path, search_slug_set) return types - def change_description(self, desc, cat_desc=None): """ Sets description in all cases and sets categoryDescription if this is a top level topic @@ -158,8 +183,9 @@ def change_description(self, desc, cat_desc=None): def topics_by_link_type_recursively(self, **kwargs): topics, _ = self.topics_and_links_by_link_type_recursively(**kwargs) return topics - - def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves=False, reverse=False, max_depth=None, min_sources=None): + + def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves=False, reverse=False, + max_depth=None, min_sources=None): """ Gets all topics linked to `self` by `linkType`. The query is recursive so it's most useful for 'is-a' and 'displays-under' linkTypes :param linkType: str, the linkType to recursively traverse. @@ -168,11 +194,15 @@ def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves :param max_depth: How many levels below this one to traverse. 1 returns only this node's children, 0 returns only this node and None means unlimited. :return: list(Topic) """ - topics, links, below_min_sources = self._topics_and_links_by_link_type_recursively_helper(linkType, only_leaves, reverse, max_depth, min_sources) - links = list(filter(lambda x: x.fromTopic not in below_min_sources and x.toTopic not in below_min_sources, links)) + topics, links, below_min_sources = self._topics_and_links_by_link_type_recursively_helper(linkType, only_leaves, + reverse, max_depth, + min_sources) + links = list( + filter(lambda x: x.fromTopic not in below_min_sources and x.toTopic not in below_min_sources, links)) return topics, links - def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leaves, reverse, max_depth, min_sources, explored_set=None, below_min_sources_set=None): + def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leaves, reverse, max_depth, min_sources, + explored_set=None, below_min_sources_set=None): """ Helper function for `topics_and_links_by_link_type_recursively()` to carry out recursive calls :param explored_set: set(str), set of slugs already explored. To be used in recursive calls. 
@@ -201,7 +231,8 @@ def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leave continue if max_depth is None or max_depth > 0: next_depth = max_depth if max_depth is None else max_depth - 1 - temp_topics, temp_links, temp_below_min_sources = child_topic._topics_and_links_by_link_type_recursively_helper(linkType, only_leaves, reverse, next_depth, min_sources, explored_set, below_min_sources_set) + temp_topics, temp_links, temp_below_min_sources = child_topic._topics_and_links_by_link_type_recursively_helper( + linkType, only_leaves, reverse, next_depth, min_sources, explored_set, below_min_sources_set) topics += temp_topics links += temp_links below_min_sources_set |= temp_below_min_sources @@ -218,7 +249,9 @@ def has_types(self, search_slug_set) -> bool: return len(search_slug_set.intersection(types)) > 0 def should_display(self) -> bool: - return getattr(self, 'shouldDisplay', True) and (getattr(self, 'numSources', 0) > 0 or self.has_description() or getattr(self, "data_source", "") == "sefaria") + return getattr(self, 'shouldDisplay', True) and ( + getattr(self, 'numSources', 0) > 0 or self.has_description() or getattr(self, "data_source", + "") == "sefaria") def has_description(self) -> bool: """ @@ -247,7 +280,6 @@ def set_slug(self, new_slug) -> None: self.save() # so that topic with this slug exists when saving links to it self.merge(old_slug) - def merge(self, other: Union['Topic', str]) -> None: """ :param other: Topic or old slug to migrate from @@ -263,13 +295,15 @@ def merge(self, other: Union['Topic', str]) -> None: # links for link in TopicLinkSetHelper.find({"$or": [{"toTopic": other_slug}, {"fromTopic": other_slug}]}): - if link.toTopic == getattr(link, 'fromTopic', None): # self-link where fromTopic and toTopic were equal before slug was changed + if link.toTopic == getattr(link, 'fromTopic', + None): # self-link where fromTopic and toTopic were equal before slug was changed link.fromTopic = self.slug link.toTopic = self.slug else: attr = 'toTopic' if link.toTopic == other_slug else 'fromTopic' setattr(link, attr, self.slug) - if getattr(link, 'fromTopic', None) == link.toTopic: # self-link where fromTopic and toTopic are equal AFTER slug was changed + if getattr(link, 'fromTopic', + None) == link.toTopic: # self-link where fromTopic and toTopic are equal AFTER slug was changed link.delete() continue try: @@ -278,16 +312,19 @@ def merge(self, other: Union['Topic', str]) -> None: link.delete() except AssertionError as e: link.delete() - logger.warning('While merging {} into {}, link assertion failed with message "{}"'.format(other_slug, self.slug, str(e))) + logger.warning( + 'While merging {} into {}, link assertion failed with message "{}"'.format(other_slug, self.slug, + str(e))) # source sheets - db.sheets.update_many({'topics.slug': other_slug}, {"$set": {'topics.$[element].slug': self.slug}}, array_filters=[{"element.slug": other_slug}]) + db.sheets.update_many({'topics.slug': other_slug}, {"$set": {'topics.$[element].slug': self.slug}}, + array_filters=[{"element.slug": other_slug}]) # indexes for index in IndexSet({"authors": other_slug}): index.authors = [self.slug if author_slug == other_slug else author_slug for author_slug in index.authors] props = index._saveable_attrs() - db.index.replace_one({"title":index.title}, props) + db.index.replace_one({"title": index.title}, props) if isinstance(other, Topic): # titles @@ -302,7 +339,12 @@ def merge(self, other: Union['Topic', str]) -> None: temp_dict = getattr(self, dict_attr, {}) for k, v in 
getattr(other, dict_attr, {}).items(): if k in temp_dict: - logger.warning('Key {} with value {} already exists in {} for topic {}. Current value is {}'.format(k, v, dict_attr, self.slug, temp_dict[k])) + logger.warning( + 'Key {} with value {} already exists in {} for topic {}. Current value is {}'.format(k, v, + dict_attr, + self.slug, + temp_dict[ + k])) continue temp_dict[k] = v if len(temp_dict) > 0: @@ -343,7 +385,9 @@ def contents(self, **kwargs): d = {'slug': self.slug} if mini else super(Topic, self).contents(**kwargs) d['primaryTitle'] = {} for lang in ('en', 'he'): - d['primaryTitle'][lang] = self.get_primary_title(lang=lang, with_disambiguation=kwargs.get('with_disambiguation', True)) + d['primaryTitle'][lang] = self.get_primary_title(lang=lang, + with_disambiguation=kwargs.get('with_disambiguation', + True)) if not kwargs.get("with_html"): for k, v in d.get("description", {}).items(): d["description"][k] = re.sub("<[^>]+>", "", v or "") @@ -406,6 +450,7 @@ class PersonTopic(Topic): """ Represents a topic which is a person. Not necessarily an author of a book. """ + @staticmethod def get_person_by_key(key: str): """ @@ -421,7 +466,7 @@ def annotate_place(self, d): if place and heKey not in properties: value, dataSource = place['value'], place['dataSource'] place_obj = Place().load({"key": value}) - if place_obj: + if place_obj: name = place_obj.primary_name('he') d['properties'][heKey] = {'value': name, 'dataSource': dataSource} return d @@ -444,7 +489,7 @@ def contents(self, **kwargs): } } return d - + # A person may have an era, a generation, or a specific birth and death years, which each may be approximate. # They may also have none of these... def _most_accurate_period(self, time_period_class: Type[TimePeriod]) -> Optional[LifePeriod]: @@ -484,6 +529,7 @@ def most_accurate_life_period(self): ''' return self._most_accurate_period(LifePeriod) + class AuthorTopic(PersonTopic): """ Represents a topic which is an author of a book. 
Can be used on the `authors` field of `Index` @@ -506,7 +552,8 @@ def _authors_indexes_fill_category(self, indexes, path, include_dependant): path_end_set = {tuple(i.categories[len(path):]) for i in indexes} for index_in_path in indexes_in_path: if tuple(index_in_path.categories[len(path):]) in path_end_set: - if index_in_path.title not in temp_index_title_set and self.slug not in set(getattr(index_in_path, 'authors', [])): + if index_in_path.title not in temp_index_title_set and self.slug not in set( + getattr(index_in_path, 'authors', [])): return False return True @@ -522,20 +569,22 @@ def aggregate_authors_indexes_by_category(self): from collections import defaultdict def index_is_commentary(index): - return getattr(index, 'base_text_titles', None) is not None and len(index.base_text_titles) > 0 and getattr(index, 'collective_title', None) is not None + return getattr(index, 'base_text_titles', None) is not None and len(index.base_text_titles) > 0 and getattr( + index, 'collective_title', None) is not None indexes = self.get_authored_indexes() - - index_or_cat_list = [] # [(index_or_cat, collective_title_term, base_category)] - cat_aggregator = defaultdict(lambda: defaultdict(list)) # of shape {(collective_title, top_cat): {(icat, category): [index_object]}} + + index_or_cat_list = [] # [(index_or_cat, collective_title_term, base_category)] + cat_aggregator = defaultdict( + lambda: defaultdict(list)) # of shape {(collective_title, top_cat): {(icat, category): [index_object]}} MAX_ICAT_FROM_END_TO_CONSIDER = 2 for index in indexes: is_comm = index_is_commentary(index) base = library.get_index(index.base_text_titles[0]) if is_comm else index collective_title = index.collective_title if is_comm else None - base_cat_path = tuple(base.categories[:-MAX_ICAT_FROM_END_TO_CONSIDER+1]) + base_cat_path = tuple(base.categories[:-MAX_ICAT_FROM_END_TO_CONSIDER + 1]) for icat in range(len(base.categories) - MAX_ICAT_FROM_END_TO_CONSIDER, len(base.categories)): - cat_aggregator[(collective_title, base_cat_path)][(icat, tuple(base.categories[:icat+1]))] += [index] + cat_aggregator[(collective_title, base_cat_path)][(icat, tuple(base.categories[:icat + 1]))] += [index] for (collective_title, _), cat_choice_dict in cat_aggregator.items(): cat_choices_sorted = sorted(cat_choice_dict.items(), key=lambda x: (len(x[1]), x[0][0]), reverse=True) (_, best_base_cat_path), temp_indexes = cat_choices_sorted[0] @@ -544,7 +593,7 @@ def index_is_commentary(index): continue if best_base_cat_path == ('Talmud', 'Bavli'): best_base_cat_path = ('Talmud',) # hard-coded to get 'Rashi on Talmud' instead of 'Rashi on Bavli' - + base_category = Category().load({"path": list(best_base_cat_path)}) if collective_title is None: index_category = base_category @@ -552,7 +601,9 @@ def index_is_commentary(index): else: index_category = Category.get_shared_category(temp_indexes) collective_title_term = Term().load({"name": collective_title}) - if index_category is None or not self._authors_indexes_fill_category(temp_indexes, index_category.path, collective_title is not None) or (collective_title is None and self._category_matches_author(index_category)): + if index_category is None or not self._authors_indexes_fill_category(temp_indexes, index_category.path, + collective_title is not None) or ( + collective_title is None and self._category_matches_author(index_category)): for temp_index in temp_indexes: index_or_cat_list += [(temp_index, None, None)] continue @@ -574,9 +625,9 @@ def get_aggregated_urls_for_authors_indexes(self) -> 
list: en_desc = getattr(index_or_cat, 'enShortDesc', None) he_desc = getattr(index_or_cat, 'heShortDesc', None) if isinstance(index_or_cat, Index): - unique_urls.append({"url":f'/{index_or_cat.title.replace(" ", "_")}', - "title": {"en": index_or_cat.get_title('en'), "he": index_or_cat.get_title('he')}, - "description":{"en": en_desc, "he": he_desc}}) + unique_urls.append({"url": f'/{index_or_cat.title.replace(" ", "_")}', + "title": {"en": index_or_cat.get_title('en'), "he": index_or_cat.get_title('he')}, + "description": {"en": en_desc, "he": he_desc}}) else: if collective_title_term is None: cat_term = Term().load({"name": index_or_cat.sharedTitle}) @@ -588,7 +639,7 @@ def get_aggregated_urls_for_authors_indexes(self) -> list: he_text = f'{collective_title_term.get_primary_title("he")} על {base_category_term.get_primary_title("he")}' unique_urls.append({"url": f'/texts/{"/".join(index_or_cat.path)}', "title": {"en": en_text, "he": he_text}, - "description":{"en": en_desc, "he": he_desc}}) + "description": {"en": en_desc, "he": he_desc}}) return unique_urls @staticmethod @@ -604,9 +655,10 @@ def __init__(self, query=None, *args, **kwargs): if self.recordClass != Topic: # include class name of recordClass + any class names of subclasses query = query or {} - subclass_names = [self.recordClass.__name__] + [klass.__name__ for klass in self.recordClass.all_subclasses()] + subclass_names = [self.recordClass.__name__] + [klass.__name__ for klass in + self.recordClass.all_subclasses()] query['subclass'] = {"$in": [self.recordClass.reverse_subclass_map[name] for name in subclass_names]} - + super().__init__(query=query, *args, **kwargs) @staticmethod @@ -646,7 +698,8 @@ class TopicLinkHelper(object): ] optional_attrs = [ 'generatedBy', - 'order', # dict with some data on how to sort this link. can have key 'custom_order' which should trump other data + 'order', + # dict with some data on how to sort this link. 
can have key 'custom_order' which should trump other data 'isJudgementCall', ] generated_by_sheets = "sheet-topic-aggregator" @@ -698,8 +751,9 @@ def _validate(self): TopicDataSource.validate_slug_exists(self.dataSource) # check for duplicates - duplicate = IntraTopicLink().load({"linkType": self.linkType, "fromTopic": self.fromTopic, "toTopic": self.toTopic, - "class": getattr(self, 'class'), "_id": {"$ne": getattr(self, "_id", None)}}) + duplicate = IntraTopicLink().load( + {"linkType": self.linkType, "fromTopic": self.fromTopic, "toTopic": self.toTopic, + "class": getattr(self, 'class'), "_id": {"$ne": getattr(self, "_id", None)}}) if duplicate is not None: raise DuplicateRecordError( "Duplicate intra topic link for linkType '{}', fromTopic '{}', toTopic '{}'".format( @@ -707,8 +761,9 @@ def _validate(self): link_type = TopicLinkType.init(self.linkType, 0) if link_type.slug == link_type.inverseSlug: - duplicate_inverse = IntraTopicLink().load({"linkType": self.linkType, "toTopic": self.fromTopic, "fromTopic": self.toTopic, - "class": getattr(self, 'class'), "_id": {"$ne": getattr(self, "_id", None)}}) + duplicate_inverse = IntraTopicLink().load( + {"linkType": self.linkType, "toTopic": self.fromTopic, "fromTopic": self.toTopic, + "class": getattr(self, 'class'), "_id": {"$ne": getattr(self, "_id", None)}}) if duplicate_inverse is not None: raise DuplicateRecordError( "Duplicate intra topic link in the inverse direction of the symmetric linkType '{}', fromTopic '{}', toTopic '{}' exists".format( @@ -718,16 +773,21 @@ def _validate(self): from_topic = Topic.init(self.fromTopic) to_topic = Topic.init(self.toTopic) if getattr(link_type, 'validFrom', False): - assert from_topic.has_types(set(link_type.validFrom)), "from topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.fromTopic, ', '.join(link_type.validFrom), self.linkType, ', '.join(from_topic.get_types())) + assert from_topic.has_types( + set(link_type.validFrom)), "from topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( + self.fromTopic, ', '.join(link_type.validFrom), self.linkType, ', '.join(from_topic.get_types())) if getattr(link_type, 'validTo', False): - assert to_topic.has_types(set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) + assert to_topic.has_types( + set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. 
Instead, types are '{}'".format( + self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) # assert this link doesn't create circular paths (in is_a link type) # should consider this test also for other non-symmetric link types such as child-of if self.linkType == TopicLinkType.isa_type: to_topic = Topic.init(self.toTopic) ancestors = to_topic.get_types() - assert self.fromTopic not in ancestors, "{} is an is-a ancestor of {} creating an illogical circle in the topics graph, here are {} ancestors: {}".format(self.fromTopic, self.toTopic, self.toTopic, ancestors) + assert self.fromTopic not in ancestors, "{} is an is-a ancestor of {} creating an illogical circle in the topics graph, here are {} ancestors: {}".format( + self.fromTopic, self.toTopic, self.toTopic, ancestors) def contents(self, **kwargs): d = super(IntraTopicLink, self).contents(**kwargs) @@ -817,12 +877,15 @@ def _validate(self): to_topic = Topic.init(self.toTopic) link_type = TopicLinkType.init(self.linkType, 0) if getattr(link_type, 'validTo', False): - assert to_topic.has_types(set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) - + assert to_topic.has_types( + set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( + self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) + def _pre_save(self): if getattr(self, "_id", None) is None: # check for duplicates - query = {"linkType": self.linkType, "ref": self.ref, "toTopic": self.toTopic, "dataSource": getattr(self, 'dataSource', {"$exists": False}), "class": getattr(self, 'class')} + query = {"linkType": self.linkType, "ref": self.ref, "toTopic": self.toTopic, + "dataSource": getattr(self, 'dataSource', {"$exists": False}), "class": getattr(self, 'class')} if getattr(self, "charLevelData", None): query["charLevelData.startChar"] = self.charLevelData['startChar'] query["charLevelData.endChar"] = self.charLevelData['endChar'] @@ -831,8 +894,9 @@ def _pre_save(self): duplicate = RefTopicLink().load(query) if duplicate is not None: - raise DuplicateRecordError("Duplicate ref topic link for linkType '{}', ref '{}', toTopic '{}', dataSource '{}'".format( - self.linkType, self.ref, self.toTopic, getattr(self, 'dataSource', 'N/A'))) + raise DuplicateRecordError( + "Duplicate ref topic link for linkType '{}', ref '{}', toTopic '{}', dataSource '{}'".format( + self.linkType, self.ref, self.toTopic, getattr(self, 'dataSource', 'N/A'))) def contents(self, **kwargs): d = super(RefTopicLink, self).contents(**kwargs) @@ -841,6 +905,7 @@ def contents(self, **kwargs): d.pop('toTopic') return d + class TopicLinkSetHelper(object): @staticmethod def init_query(query, link_class): @@ -852,7 +917,8 @@ def init_query(query, link_class): def find(query=None, page=0, limit=0, sort=[("_id", 1)], proj=None, record_kwargs=None): from sefaria.system.database import db record_kwargs = record_kwargs or {} - raw_records = getattr(db, TopicLinkHelper.collection).find(query, proj).sort(sort).skip(page * limit).limit(limit) + raw_records = getattr(db, TopicLinkHelper.collection).find(query, proj).sort(sort).skip(page * limit).limit( + limit) return [TopicLinkHelper.init_by_class(r, **record_kwargs) for r in raw_records] @@ -952,11 +1018,13 @@ def process_index_title_change_in_topic_links(indx, **kwargs): except 
InputError: logger.warning("Failed to convert ref data from: {} to {}".format(kwargs['old'], kwargs['new'])) + def process_index_delete_in_topic_links(indx, **kwargs): from sefaria.model.text import prepare_index_regex_for_dependency_process pattern = prepare_index_regex_for_dependency_process(indx) RefTopicLinkSet({"ref": {"$regex": pattern}}).delete() + def process_topic_delete(topic): RefTopicLinkSet({"toTopic": topic.slug}).delete() IntraTopicLinkSet({"$or": [{"toTopic": topic.slug}, {"fromTopic": topic.slug}]}).delete() @@ -964,18 +1032,21 @@ def process_topic_delete(topic): sheet["topics"] = [t for t in sheet["topics"] if t["slug"] != topic.slug] db.sheets.save(sheet) + def process_topic_description_change(topic, **kwargs): """ Upon topic description change, get rid of old markdown links and save any new ones """ # need to delete currently existing links but dont want to delete link if its still in the description # so load up a dictionary of relevant data -> link - IntraTopicLinkSet({"toTopic": topic.slug, "linkType": "related-to", "dataSource": "learning-team-editing-tool"}).delete() + IntraTopicLinkSet( + {"toTopic": topic.slug, "linkType": "related-to", "dataSource": "learning-team-editing-tool"}).delete() refLinkType = 'popular-writing-of' if getattr(topic, 'subclass', '') == 'author' else 'about' - RefTopicLinkSet({"toTopic": topic.slug, "linkType": refLinkType, "dataSource": "learning-team-editing-tool"}).delete() + RefTopicLinkSet( + {"toTopic": topic.slug, "linkType": refLinkType, "dataSource": "learning-team-editing-tool"}).delete() markdown_links = set() - for lang, val in kwargs['new'].items(): # put each link in a set so we dont try to create duplicate of same link + for lang, val in kwargs['new'].items(): # put each link in a set so we dont try to create duplicate of same link for m in re.findall('\[.*?\]\((.*?)\)', val): markdown_links.add(m) @@ -995,12 +1066,7 @@ def process_topic_description_change(topic, **kwargs): ref = Ref(markdown_link).normal() except InputError as e: continue - ref_topic_dict = {"toTopic": topic.slug, "dataSource": "learning-team-editing-tool", "linkType": refLinkType, + ref_topic_dict = {"toTopic": topic.slug, "dataSource": "learning-team-editing-tool", + "linkType": refLinkType, 'ref': ref} RefTopicLink(ref_topic_dict).save() - - - - - - From 604663f3fab491767ecea54f1625c64740b3c8fb Mon Sep 17 00:00:00 2001 From: Russel Neiss Date: Tue, 21 Nov 2023 15:51:39 -0600 Subject: [PATCH 40/52] fix: Fixes random text API so that it doesn't 500 if it's missing the `titles` and `categories` query param --- reader/views.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/reader/views.py b/reader/views.py index b93cc2bd1e..76696f924f 100644 --- a/reader/views.py +++ b/reader/views.py @@ -4022,8 +4022,17 @@ def random_text_api(request): """ Return Texts API data for a random ref. """ - categories = set(request.GET.get('categories', '').split('|')) - titles = set(request.GET.get('titles', '').split('|')) + + if "categories" in request.GET: + categories = set(request.GET.get('categories', '').split('|')) + else: + categories = None + + if "titles" in request.GET: + titles = set(request.GET.get('titles', '').split('|')) + else: + titles = None + response = redirect(iri_to_uri("/api/texts/" + random_ref(categories, titles)) + "?commentary=0&context=0", permanent=False) return response @@ -4575,4 +4584,3 @@ def isNodeJsReachable(): logger.warn("Failed rollout healthcheck. 
Healthcheck Response: {}".format(resp)) return http.JsonResponse(resp, status=statusCode) - From b5f2c4ddfa8a55ea1573d2450636ddcdc00764be Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 22 Nov 2023 10:51:04 +0200 Subject: [PATCH 41/52] chore(Backend topic images): Revert bad autoformat --- sefaria/model/topic.py | 55 +++++++++++++----------------------------- 1 file changed, 17 insertions(+), 38 deletions(-) diff --git a/sefaria/model/topic.py b/sefaria/model/topic.py index 81ca0d9663..2fcc080723 100644 --- a/sefaria/model/topic.py +++ b/sefaria/model/topic.py @@ -184,8 +184,7 @@ def topics_by_link_type_recursively(self, **kwargs): topics, _ = self.topics_and_links_by_link_type_recursively(**kwargs) return topics - def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves=False, reverse=False, - max_depth=None, min_sources=None): + def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves=False, reverse=False, max_depth=None, min_sources=None): """ Gets all topics linked to `self` by `linkType`. The query is recursive so it's most useful for 'is-a' and 'displays-under' linkTypes :param linkType: str, the linkType to recursively traverse. @@ -201,8 +200,7 @@ def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves filter(lambda x: x.fromTopic not in below_min_sources and x.toTopic not in below_min_sources, links)) return topics, links - def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leaves, reverse, max_depth, min_sources, - explored_set=None, below_min_sources_set=None): + def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leaves, reverse, max_depth, min_sources, explored_set=None, below_min_sources_set=None): """ Helper function for `topics_and_links_by_link_type_recursively()` to carry out recursive calls :param explored_set: set(str), set of slugs already explored. To be used in recursive calls. @@ -295,8 +293,7 @@ def merge(self, other: Union['Topic', str]) -> None: # links for link in TopicLinkSetHelper.find({"$or": [{"toTopic": other_slug}, {"fromTopic": other_slug}]}): - if link.toTopic == getattr(link, 'fromTopic', - None): # self-link where fromTopic and toTopic were equal before slug was changed + if link.toTopic == getattr(link, 'fromTopic', None): # self-link where fromTopic and toTopic were equal before slug was changed link.fromTopic = self.slug link.toTopic = self.slug else: @@ -312,9 +309,7 @@ def merge(self, other: Union['Topic', str]) -> None: link.delete() except AssertionError as e: link.delete() - logger.warning( - 'While merging {} into {}, link assertion failed with message "{}"'.format(other_slug, self.slug, - str(e))) + logger.warning('While merging {} into {}, link assertion failed with message "{}"'.format(other_slug, self.slug, str(e))) # source sheets db.sheets.update_many({'topics.slug': other_slug}, {"$set": {'topics.$[element].slug': self.slug}}, @@ -339,12 +334,7 @@ def merge(self, other: Union['Topic', str]) -> None: temp_dict = getattr(self, dict_attr, {}) for k, v in getattr(other, dict_attr, {}).items(): if k in temp_dict: - logger.warning( - 'Key {} with value {} already exists in {} for topic {}. Current value is {}'.format(k, v, - dict_attr, - self.slug, - temp_dict[ - k])) + logger.warning('Key {} with value {} already exists in {} for topic {}. 
Current value is {}'.format(k, v, dict_attr, self.slug, temp_dict[k])) continue temp_dict[k] = v if len(temp_dict) > 0: @@ -386,8 +376,7 @@ def contents(self, **kwargs): d['primaryTitle'] = {} for lang in ('en', 'he'): d['primaryTitle'][lang] = self.get_primary_title(lang=lang, - with_disambiguation=kwargs.get('with_disambiguation', - True)) + with_disambiguation=kwargs.get('with_disambiguation', True)) if not kwargs.get("with_html"): for k, v in d.get("description", {}).items(): d["description"][k] = re.sub("<[^>]+>", "", v or "") @@ -525,7 +514,7 @@ def most_accurate_time_period(self): def most_accurate_life_period(self): ''' - :return: most accurate period as LifePeriod. currently the only difference from TimePeriod is the way the time period is formatted as a string. + :return: most accurate period as LifePeriod. Currently, the only difference from TimePeriod is the way the time period is formatted as a string. ''' return self._most_accurate_period(LifePeriod) @@ -552,8 +541,7 @@ def _authors_indexes_fill_category(self, indexes, path, include_dependant): path_end_set = {tuple(i.categories[len(path):]) for i in indexes} for index_in_path in indexes_in_path: if tuple(index_in_path.categories[len(path):]) in path_end_set: - if index_in_path.title not in temp_index_title_set and self.slug not in set( - getattr(index_in_path, 'authors', [])): + if index_in_path.title not in temp_index_title_set and self.slug not in set(getattr(index_in_path, 'authors', [])): return False return True @@ -569,8 +557,7 @@ def aggregate_authors_indexes_by_category(self): from collections import defaultdict def index_is_commentary(index): - return getattr(index, 'base_text_titles', None) is not None and len(index.base_text_titles) > 0 and getattr( - index, 'collective_title', None) is not None + return getattr(index, 'base_text_titles', None) is not None and len(index.base_text_titles) > 0 and getattr(index, 'collective_title', None) is not None indexes = self.get_authored_indexes() @@ -601,9 +588,7 @@ def index_is_commentary(index): else: index_category = Category.get_shared_category(temp_indexes) collective_title_term = Term().load({"name": collective_title}) - if index_category is None or not self._authors_indexes_fill_category(temp_indexes, index_category.path, - collective_title is not None) or ( - collective_title is None and self._category_matches_author(index_category)): + if index_category is None or not self._authors_indexes_fill_category(temp_indexes, index_category.path, collective_title is not None) or (collective_title is None and self._category_matches_author(index_category)): for temp_index in temp_indexes: index_or_cat_list += [(temp_index, None, None)] continue @@ -774,20 +759,17 @@ def _validate(self): to_topic = Topic.init(self.toTopic) if getattr(link_type, 'validFrom', False): assert from_topic.has_types( - set(link_type.validFrom)), "from topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( - self.fromTopic, ', '.join(link_type.validFrom), self.linkType, ', '.join(from_topic.get_types())) + set(link_type.validFrom)), "from topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.fromTopic, ', '.join(link_type.validFrom), self.linkType, ', '.join(from_topic.get_types())) if getattr(link_type, 'validTo', False): assert to_topic.has_types( - set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. 
Instead, types are '{}'".format( - self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) + set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) # assert this link doesn't create circular paths (in is_a link type) # should consider this test also for other non-symmetric link types such as child-of if self.linkType == TopicLinkType.isa_type: to_topic = Topic.init(self.toTopic) ancestors = to_topic.get_types() - assert self.fromTopic not in ancestors, "{} is an is-a ancestor of {} creating an illogical circle in the topics graph, here are {} ancestors: {}".format( - self.fromTopic, self.toTopic, self.toTopic, ancestors) + assert self.fromTopic not in ancestors, "{} is an is-a ancestor of {} creating an illogical circle in the topics graph, here are {} ancestors: {}".format(self.fromTopic, self.toTopic, self.toTopic, ancestors) def contents(self, **kwargs): d = super(IntraTopicLink, self).contents(**kwargs) @@ -878,8 +860,7 @@ def _validate(self): link_type = TopicLinkType.init(self.linkType, 0) if getattr(link_type, 'validTo', False): assert to_topic.has_types( - set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( - self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) + set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) def _pre_save(self): if getattr(self, "_id", None) is None: @@ -917,8 +898,7 @@ def init_query(query, link_class): def find(query=None, page=0, limit=0, sort=[("_id", 1)], proj=None, record_kwargs=None): from sefaria.system.database import db record_kwargs = record_kwargs or {} - raw_records = getattr(db, TopicLinkHelper.collection).find(query, proj).sort(sort).skip(page * limit).limit( - limit) + raw_records = getattr(db, TopicLinkHelper.collection).find(query, proj).sort(sort).skip(page * limit).limit(limit) return [TopicLinkHelper.init_by_class(r, **record_kwargs) for r in raw_records] @@ -1042,11 +1022,10 @@ def process_topic_description_change(topic, **kwargs): IntraTopicLinkSet( {"toTopic": topic.slug, "linkType": "related-to", "dataSource": "learning-team-editing-tool"}).delete() refLinkType = 'popular-writing-of' if getattr(topic, 'subclass', '') == 'author' else 'about' - RefTopicLinkSet( - {"toTopic": topic.slug, "linkType": refLinkType, "dataSource": "learning-team-editing-tool"}).delete() + RefTopicLinkSet({"toTopic": topic.slug, "linkType": refLinkType, "dataSource": "learning-team-editing-tool"}).delete() markdown_links = set() - for lang, val in kwargs['new'].items(): # put each link in a set so we dont try to create duplicate of same link + for lang, val in kwargs['new'].items(): # put each link in a set, so we don't try to create duplicate of same link for m in re.findall('\[.*?\]\((.*?)\)', val): markdown_links.add(m) From 41d0d778a0d93dfdcea6e03e7009e0f8f8461fe6 Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 22 Nov 2023 10:52:15 +0200 Subject: [PATCH 42/52] chore(Backend topic images): Remove script for data migration --- scripts/topic_data_migration.py | 43 --------------------------------- 1 file changed, 43 deletions(-) delete mode 100644 scripts/topic_data_migration.py diff --git 
a/scripts/topic_data_migration.py b/scripts/topic_data_migration.py deleted file mode 100644 index fac1c31ce3..0000000000 --- a/scripts/topic_data_migration.py +++ /dev/null @@ -1,43 +0,0 @@ -import django - -django.setup() - -from sefaria.model import * - -from sefaria.helper.topic import add_image_to_topic - -## Adding images - -hardcodedTopicImagesMap = { - 'rosh-hashanah': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/rosh-hashanah.jpeg', - 'enCaption': 'Rosh Hashanah, Arthur Szyk (1894-1951) Tempera and ink on paper. New Canaan, 1948. Collection of Yeshiva University Museum. Gift of Charles Frost', - 'heCaption': 'ראש השנה, ארתור שיק, ארה״ב 1948. אוסף ישיבה יוניברסיטי'}, - - 'yom-kippur': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/yom-kippur.jpeg', - 'enCaption': 'Micrography of Jonah being swallowed by the fish. Germany, 1300-1500, The British Library', - 'heCaption': 'מיקרוגרפיה של יונה בבטן הדג, מתוך ספר יונה ההפטרה של יום כיפור, 1300-1500'}, - - 'the-four-species': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/the-four-species.jpg', - 'enCaption': 'Etrog container, K B, late 19th century, Germany. The Jewish Museum, Gift of Dr. Harry G. Friedman', - 'heCaption': 'תיבת אתרוג, סוף המאה ה19, גרמניה. המוזיאון היהודי בניו יורק, מתנת דר. הארי ג. פרידמן '}, - - 'sukkot': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/sukkot.jpg', - 'enCaption': 'Detail of a painting of a sukkah. Image taken from f. 316v of Forli Siddur. 1383, Italian rite. The British Library', - 'heCaption': 'פרט ציור של סוכה עם שולחן פרוש ושלוש דמויות. דימוי מתוך סידור פורלי, 1383 איטליה'}, - - 'simchat-torah': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/simchat-torah.jpg', - 'enCaption': 'Rosh Hashanah postcard: Hakafot, Haim Yisroel Goldberg (1888-1943) Publisher: Williamsburg Post Card Co. Germany, ca. 1915 Collection of Yeshiva University Museum', - 'heCaption': 'גלויה לראש השנה: הקפות, חיים גולדברג, גרמניה 1915, אוסף ישיבה יוניברסיטי'}, - - 'shabbat': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/shabbat.jpg', - 'enCaption': 'Friday Evening, Isidor Kaufmann, Austria c. 1920. The Jewish Museum, Gift of Mr. and Mrs. M. R. Schweitzer', - 'heCaption': 'שישי בערב, איזידור קאופמן, וינה 1920. המוזיאון היהודי בניו יורק, מתנת מר וגברת מ.ר. 
שוויצר'}, - -} - -for topic in hardcodedTopicImagesMap: - add_image_to_topic(topic, - image_uri=hardcodedTopicImagesMap[topic]["image_uri"], - en_caption=hardcodedTopicImagesMap[topic]["enCaption"], - he_caption=hardcodedTopicImagesMap[topic]["heCaption"]) - From c023bbd0d7c74a23515a2e02263e9ce5f28f2c84 Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 22 Nov 2023 10:53:20 +0200 Subject: [PATCH 43/52] fix(Backend topic images): Prefer use of Topic.init vs load() --- sefaria/helper/topic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sefaria/helper/topic.py b/sefaria/helper/topic.py index 504fe11ade..5e73abcab7 100644 --- a/sefaria/helper/topic.py +++ b/sefaria/helper/topic.py @@ -1274,7 +1274,7 @@ def add_image_to_topic(topic_slug, image_uri, en_caption, he_caption): :param en_caption String: The English caption for a Topic image :param he_caption String: The Hebrew caption for a Topic image """ - topic = Topic().load({'slug': topic_slug}) + topic = Topic.init(topic_slug) topic.image = {"image_uri": image_uri, "image_caption": { "en": en_caption, From 7dbccb3b0138912216a40c68af6e38ec56caa10c Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 22 Nov 2023 11:02:56 +0200 Subject: [PATCH 44/52] fix(Backend topic images): Restore portal validation to topic validate --- sefaria/model/topic.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sefaria/model/topic.py b/sefaria/model/topic.py index 2fcc080723..56f07b8272 100644 --- a/sefaria/model/topic.py +++ b/sefaria/model/topic.py @@ -103,6 +103,8 @@ def _validate(self): if getattr(self, "image", False): img_url = self.image.get("image_uri") if img_url: validate_url(img_url) + if getattr(self, 'portal_slug', None): + Portal.validate_slug_exists(self.portal_slug) def _normalize(self): super()._normalize() From c816a9b658e603f7de106ccf2d6d4662b9abd766 Mon Sep 17 00:00:00 2001 From: Ron Shapiro Date: Wed, 22 Nov 2023 11:40:48 +0200 Subject: [PATCH 45/52] Detect TextFamily with underscores everywhere. 
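# Illustrative sketch, an editorial aside rather than part of the commit below:
# this patch moves underscore-to-space normalization of version titles into
# TextFamily itself, so URL-style and stored forms resolve to the same version.
# A rough standalone equivalent of just that normalization step, assuming
# nothing about Sefaria's internal APIs:
def normalize_version_title(version):
    # Version titles arriving from URLs use underscores; stored titles use spaces.
    return version.replace("_", " ") if version else version

assert normalize_version_title("Tanakh:_The_Holy_Scriptures,_published_by_JPS") == \
       "Tanakh: The Holy Scriptures, published by JPS"
assert normalize_version_title(None) is None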
Fixes https://github.com/Sefaria/Sefaria-Project/issues/1724 --- reader/views.py | 10 +--------- sefaria/model/tests/chunk_test.py | 11 +++++++++++ sefaria/model/text.py | 5 +++++ 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/reader/views.py b/reader/views.py index b93cc2bd1e..455c9d0ce3 100644 --- a/reader/views.py +++ b/reader/views.py @@ -1215,7 +1215,6 @@ def edit_text(request, ref=None, lang=None, version=None): mode = "Add" else: # Pull a particular section to edit - version = version.replace("_", " ") if version else None #text = get_text(ref, lang=lang, version=version) text = TextFamily(Ref(ref), lang=lang, version=version).contents() text["mode"] = request.path.split("/")[1] @@ -1389,15 +1388,11 @@ def texts_api(request, tref): commentary = bool(int(request.GET.get("commentary", False))) pad = bool(int(request.GET.get("pad", 1))) versionEn = request.GET.get("ven", None) + versionHe = request.GET.get("vhe", None) firstAvailableRef = bool(int(request.GET.get("firstAvailableRef", False))) # use first available ref, which may not be the same as oref if firstAvailableRef: temp_oref = oref.first_available_section_ref() oref = temp_oref or oref # don't overwrite oref if first available section ref fails - if versionEn: - versionEn = versionEn.replace("_", " ") - versionHe = request.GET.get("vhe", None) - if versionHe: - versionHe = versionHe.replace("_", " ") layer_name = request.GET.get("layer", None) alts = bool(int(request.GET.get("alts", True))) wrapLinks = bool(int(request.GET.get("wrapLinks", False))) @@ -1553,9 +1548,6 @@ def social_image_api(request, tref): ref = Ref(tref) ref_str = ref.normal() if lang == "en" else ref.he_normal() - if version: - version = version.replace("_", " ") - tf = TextFamily(ref, stripItags=True, lang=lang, version=version, context=0, commentary=False).contents() he = tf["he"] if type(tf["he"]) is list else [tf["he"]] diff --git a/sefaria/model/tests/chunk_test.py b/sefaria/model/tests/chunk_test.py index 28e99bc099..b1c725c745 100644 --- a/sefaria/model/tests/chunk_test.py +++ b/sefaria/model/tests/chunk_test.py @@ -196,6 +196,17 @@ def test_text_family_alts(): c = tf.contents() assert c.get("alts") +def test_text_family_version_with_underscores(): + with_spaces = TextFamily( + Ref("Amos 1"), lang="he", lang2="en", commentary=False, + version="Miqra according to the Masorah", + version2="Tanakh: The Holy Scriptures, published by JPS") + with_underscores = TextFamily( + Ref("Amos 1"), lang="he", lang2="en", commentary=False, + version="Miqra_according_to_the_Masorah", + version2="Tanakh:_The_Holy_Scriptures,_published_by_JPS") + assert with_spaces.he == with_underscores.he + assert with_spaces.text == with_underscores.text def test_validate(): passing_refs = [ diff --git a/sefaria/model/text.py b/sefaria/model/text.py index 581b7eee93..e1d398a9e5 100644 --- a/sefaria/model/text.py +++ b/sefaria/model/text.py @@ -2287,6 +2287,11 @@ def __init__(self, oref, context=1, commentary=True, version=None, lang=None, elif oref.has_default_child(): oref = oref.default_child_ref() + if version: + version = version.replace("_", " ") + if version2: + version2 = version2.replace("_", " ") + self.ref = oref.normal() self.heRef = oref.he_normal() self.isComplex = oref.index.is_complex() From ea5192c8de0400ebda616d37ff633d29ea6402d9 Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 22 Nov 2023 12:14:10 +0200 Subject: [PATCH 46/52] chore(Backend topic images): revert manual fix of autoformat --- sefaria/model/topic.py | 55 
+++++++++++++++++++++++++++++------------- 1 file changed, 38 insertions(+), 17 deletions(-) diff --git a/sefaria/model/topic.py b/sefaria/model/topic.py index 56f07b8272..6508def66b 100644 --- a/sefaria/model/topic.py +++ b/sefaria/model/topic.py @@ -186,7 +186,8 @@ def topics_by_link_type_recursively(self, **kwargs): topics, _ = self.topics_and_links_by_link_type_recursively(**kwargs) return topics - def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves=False, reverse=False, max_depth=None, min_sources=None): + def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves=False, reverse=False, + max_depth=None, min_sources=None): """ Gets all topics linked to `self` by `linkType`. The query is recursive so it's most useful for 'is-a' and 'displays-under' linkTypes :param linkType: str, the linkType to recursively traverse. @@ -202,7 +203,8 @@ def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves filter(lambda x: x.fromTopic not in below_min_sources and x.toTopic not in below_min_sources, links)) return topics, links - def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leaves, reverse, max_depth, min_sources, explored_set=None, below_min_sources_set=None): + def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leaves, reverse, max_depth, min_sources, + explored_set=None, below_min_sources_set=None): """ Helper function for `topics_and_links_by_link_type_recursively()` to carry out recursive calls :param explored_set: set(str), set of slugs already explored. To be used in recursive calls. @@ -295,7 +297,8 @@ def merge(self, other: Union['Topic', str]) -> None: # links for link in TopicLinkSetHelper.find({"$or": [{"toTopic": other_slug}, {"fromTopic": other_slug}]}): - if link.toTopic == getattr(link, 'fromTopic', None): # self-link where fromTopic and toTopic were equal before slug was changed + if link.toTopic == getattr(link, 'fromTopic', + None): # self-link where fromTopic and toTopic were equal before slug was changed link.fromTopic = self.slug link.toTopic = self.slug else: @@ -311,7 +314,9 @@ def merge(self, other: Union['Topic', str]) -> None: link.delete() except AssertionError as e: link.delete() - logger.warning('While merging {} into {}, link assertion failed with message "{}"'.format(other_slug, self.slug, str(e))) + logger.warning( + 'While merging {} into {}, link assertion failed with message "{}"'.format(other_slug, self.slug, + str(e))) # source sheets db.sheets.update_many({'topics.slug': other_slug}, {"$set": {'topics.$[element].slug': self.slug}}, @@ -336,7 +341,12 @@ def merge(self, other: Union['Topic', str]) -> None: temp_dict = getattr(self, dict_attr, {}) for k, v in getattr(other, dict_attr, {}).items(): if k in temp_dict: - logger.warning('Key {} with value {} already exists in {} for topic {}. Current value is {}'.format(k, v, dict_attr, self.slug, temp_dict[k])) + logger.warning( + 'Key {} with value {} already exists in {} for topic {}. 
Current value is {}'.format(k, v, + dict_attr, + self.slug, + temp_dict[ + k])) continue temp_dict[k] = v if len(temp_dict) > 0: @@ -378,7 +388,8 @@ def contents(self, **kwargs): d['primaryTitle'] = {} for lang in ('en', 'he'): d['primaryTitle'][lang] = self.get_primary_title(lang=lang, - with_disambiguation=kwargs.get('with_disambiguation', True)) + with_disambiguation=kwargs.get('with_disambiguation', + True)) if not kwargs.get("with_html"): for k, v in d.get("description", {}).items(): d["description"][k] = re.sub("<[^>]+>", "", v or "") @@ -516,7 +527,7 @@ def most_accurate_time_period(self): def most_accurate_life_period(self): ''' - :return: most accurate period as LifePeriod. Currently, the only difference from TimePeriod is the way the time period is formatted as a string. + :return: most accurate period as LifePeriod. currently the only difference from TimePeriod is the way the time period is formatted as a string. ''' return self._most_accurate_period(LifePeriod) @@ -543,7 +554,8 @@ def _authors_indexes_fill_category(self, indexes, path, include_dependant): path_end_set = {tuple(i.categories[len(path):]) for i in indexes} for index_in_path in indexes_in_path: if tuple(index_in_path.categories[len(path):]) in path_end_set: - if index_in_path.title not in temp_index_title_set and self.slug not in set(getattr(index_in_path, 'authors', [])): + if index_in_path.title not in temp_index_title_set and self.slug not in set( + getattr(index_in_path, 'authors', [])): return False return True @@ -559,7 +571,8 @@ def aggregate_authors_indexes_by_category(self): from collections import defaultdict def index_is_commentary(index): - return getattr(index, 'base_text_titles', None) is not None and len(index.base_text_titles) > 0 and getattr(index, 'collective_title', None) is not None + return getattr(index, 'base_text_titles', None) is not None and len(index.base_text_titles) > 0 and getattr( + index, 'collective_title', None) is not None indexes = self.get_authored_indexes() @@ -590,7 +603,9 @@ def index_is_commentary(index): else: index_category = Category.get_shared_category(temp_indexes) collective_title_term = Term().load({"name": collective_title}) - if index_category is None or not self._authors_indexes_fill_category(temp_indexes, index_category.path, collective_title is not None) or (collective_title is None and self._category_matches_author(index_category)): + if index_category is None or not self._authors_indexes_fill_category(temp_indexes, index_category.path, + collective_title is not None) or ( + collective_title is None and self._category_matches_author(index_category)): for temp_index in temp_indexes: index_or_cat_list += [(temp_index, None, None)] continue @@ -761,17 +776,20 @@ def _validate(self): to_topic = Topic.init(self.toTopic) if getattr(link_type, 'validFrom', False): assert from_topic.has_types( - set(link_type.validFrom)), "from topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.fromTopic, ', '.join(link_type.validFrom), self.linkType, ', '.join(from_topic.get_types())) + set(link_type.validFrom)), "from topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( + self.fromTopic, ', '.join(link_type.validFrom), self.linkType, ', '.join(from_topic.get_types())) if getattr(link_type, 'validTo', False): assert to_topic.has_types( - set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. 
Instead, types are '{}'".format(self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) + set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( + self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) # assert this link doesn't create circular paths (in is_a link type) # should consider this test also for other non-symmetric link types such as child-of if self.linkType == TopicLinkType.isa_type: to_topic = Topic.init(self.toTopic) ancestors = to_topic.get_types() - assert self.fromTopic not in ancestors, "{} is an is-a ancestor of {} creating an illogical circle in the topics graph, here are {} ancestors: {}".format(self.fromTopic, self.toTopic, self.toTopic, ancestors) + assert self.fromTopic not in ancestors, "{} is an is-a ancestor of {} creating an illogical circle in the topics graph, here are {} ancestors: {}".format( + self.fromTopic, self.toTopic, self.toTopic, ancestors) def contents(self, **kwargs): d = super(IntraTopicLink, self).contents(**kwargs) @@ -862,7 +880,8 @@ def _validate(self): link_type = TopicLinkType.init(self.linkType, 0) if getattr(link_type, 'validTo', False): assert to_topic.has_types( - set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) + set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( + self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) def _pre_save(self): if getattr(self, "_id", None) is None: @@ -900,7 +919,8 @@ def init_query(query, link_class): def find(query=None, page=0, limit=0, sort=[("_id", 1)], proj=None, record_kwargs=None): from sefaria.system.database import db record_kwargs = record_kwargs or {} - raw_records = getattr(db, TopicLinkHelper.collection).find(query, proj).sort(sort).skip(page * limit).limit(limit) + raw_records = getattr(db, TopicLinkHelper.collection).find(query, proj).sort(sort).skip(page * limit).limit( + limit) return [TopicLinkHelper.init_by_class(r, **record_kwargs) for r in raw_records] @@ -1024,10 +1044,11 @@ def process_topic_description_change(topic, **kwargs): IntraTopicLinkSet( {"toTopic": topic.slug, "linkType": "related-to", "dataSource": "learning-team-editing-tool"}).delete() refLinkType = 'popular-writing-of' if getattr(topic, 'subclass', '') == 'author' else 'about' - RefTopicLinkSet({"toTopic": topic.slug, "linkType": refLinkType, "dataSource": "learning-team-editing-tool"}).delete() + RefTopicLinkSet( + {"toTopic": topic.slug, "linkType": refLinkType, "dataSource": "learning-team-editing-tool"}).delete() markdown_links = set() - for lang, val in kwargs['new'].items(): # put each link in a set, so we don't try to create duplicate of same link + for lang, val in kwargs['new'].items(): # put each link in a set so we dont try to create duplicate of same link for m in re.findall('\[.*?\]\((.*?)\)', val): markdown_links.add(m) From 2db060db5cf7187974f1e1475fc140a028ade809 Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 22 Nov 2023 12:16:53 +0200 Subject: [PATCH 47/52] chore(Backend topic images): Undo autoformat --- sefaria/model/topic.py | 191 +++++++++++++---------------------------- 1 file changed, 60 insertions(+), 131 deletions(-) diff --git a/sefaria/model/topic.py b/sefaria/model/topic.py 
index 6508def66b..0a211f7867 100644 --- a/sefaria/model/topic.py +++ b/sefaria/model/topic.py @@ -4,7 +4,6 @@ from .text import Ref, IndexSet, AbstractTextRecord from .category import Category from sefaria.system.exceptions import InputError, DuplicateRecordError -from sefaria.system.validators import validate_url from sefaria.model.timeperiod import TimePeriod, LifePeriod from sefaria.model.portal import Portal from sefaria.system.database import db @@ -12,7 +11,6 @@ from sefaria.model.place import Place import regex as re from typing import Type - logger = structlog.get_logger(__name__) @@ -43,42 +41,16 @@ class Topic(abst.SluggedAbstractMongoRecord, AbstractTitledObject): 'numSources', 'shouldDisplay', 'parasha', # name of parsha as it appears in `parshiot` collection - 'ref', - # dictionary for topics with refs associated with them (e.g. parashah) containing strings `en`, `he`, and `url`. + 'ref', # dictionary for topics with refs associated with them (e.g. parashah) containing strings `en`, `he`, and `url`. 'good_to_promote', 'description_published', # bool to keep track of which descriptions we've vetted 'isAmbiguous', # True if topic primary title can refer to multiple other topics - "data_source", # any topic edited manually should display automatically in the TOC and this flag ensures this + "data_source", #any topic edited manually should display automatically in the TOC and this flag ensures this 'image', "portal_slug", # slug to relevant Portal object ] - attr_schemas = { - "image": { - "image_uri": { - "type": "string", - "required": True, - "regex": "^https://storage\.googleapis\.com/img\.sefaria\.org/topics/.*?" - }, - "image_caption": { - "type": "dict", - "required": True, - "schema": { - "en": { - "type": "string", - "required": True - }, - "he": { - "type": "string", - "required": True - } - } - } - } - } - ROOT = "Main Menu" # the root of topic TOC is not a topic, so this is a fake slug. we know it's fake because it's not in normal form - - # this constant is helpful in the topic editor tool functions in this file + # this constant is helpful in the topic editor tool functions in this file def load(self, query, proj=None): if self.__class__ != Topic: @@ -100,9 +72,6 @@ def _validate(self): super(Topic, self)._validate() if getattr(self, 'subclass', False): assert self.subclass in self.subclass_map, f"Field `subclass` set to {self.subclass} which is not one of the valid subclass keys in `Topic.subclass_map`. 
Valid keys are {', '.join(self.subclass_map.keys())}" - if getattr(self, "image", False): - img_url = self.image.get("image_uri") - if img_url: validate_url(img_url) if getattr(self, 'portal_slug', None): Portal.validate_slug_exists(self.portal_slug) @@ -164,6 +133,7 @@ def get_types(self, types=None, curr_path=None, search_slug_set=None): new_topic.get_types(types, new_path, search_slug_set) return types + def change_description(self, desc, cat_desc=None): """ Sets description in all cases and sets categoryDescription if this is a top level topic @@ -185,9 +155,8 @@ def change_description(self, desc, cat_desc=None): def topics_by_link_type_recursively(self, **kwargs): topics, _ = self.topics_and_links_by_link_type_recursively(**kwargs) return topics - - def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves=False, reverse=False, - max_depth=None, min_sources=None): + + def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves=False, reverse=False, max_depth=None, min_sources=None): """ Gets all topics linked to `self` by `linkType`. The query is recursive so it's most useful for 'is-a' and 'displays-under' linkTypes :param linkType: str, the linkType to recursively traverse. @@ -196,15 +165,11 @@ def topics_and_links_by_link_type_recursively(self, linkType='is-a', only_leaves :param max_depth: How many levels below this one to traverse. 1 returns only this node's children, 0 returns only this node and None means unlimited. :return: list(Topic) """ - topics, links, below_min_sources = self._topics_and_links_by_link_type_recursively_helper(linkType, only_leaves, - reverse, max_depth, - min_sources) - links = list( - filter(lambda x: x.fromTopic not in below_min_sources and x.toTopic not in below_min_sources, links)) + topics, links, below_min_sources = self._topics_and_links_by_link_type_recursively_helper(linkType, only_leaves, reverse, max_depth, min_sources) + links = list(filter(lambda x: x.fromTopic not in below_min_sources and x.toTopic not in below_min_sources, links)) return topics, links - def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leaves, reverse, max_depth, min_sources, - explored_set=None, below_min_sources_set=None): + def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leaves, reverse, max_depth, min_sources, explored_set=None, below_min_sources_set=None): """ Helper function for `topics_and_links_by_link_type_recursively()` to carry out recursive calls :param explored_set: set(str), set of slugs already explored. To be used in recursive calls. 
@@ -233,8 +198,7 @@ def _topics_and_links_by_link_type_recursively_helper(self, linkType, only_leave continue if max_depth is None or max_depth > 0: next_depth = max_depth if max_depth is None else max_depth - 1 - temp_topics, temp_links, temp_below_min_sources = child_topic._topics_and_links_by_link_type_recursively_helper( - linkType, only_leaves, reverse, next_depth, min_sources, explored_set, below_min_sources_set) + temp_topics, temp_links, temp_below_min_sources = child_topic._topics_and_links_by_link_type_recursively_helper(linkType, only_leaves, reverse, next_depth, min_sources, explored_set, below_min_sources_set) topics += temp_topics links += temp_links below_min_sources_set |= temp_below_min_sources @@ -251,9 +215,7 @@ def has_types(self, search_slug_set) -> bool: return len(search_slug_set.intersection(types)) > 0 def should_display(self) -> bool: - return getattr(self, 'shouldDisplay', True) and ( - getattr(self, 'numSources', 0) > 0 or self.has_description() or getattr(self, "data_source", - "") == "sefaria") + return getattr(self, 'shouldDisplay', True) and (getattr(self, 'numSources', 0) > 0 or self.has_description() or getattr(self, "data_source", "") == "sefaria") def has_description(self) -> bool: """ @@ -282,6 +244,7 @@ def set_slug(self, new_slug) -> None: self.save() # so that topic with this slug exists when saving links to it self.merge(old_slug) + def merge(self, other: Union['Topic', str]) -> None: """ :param other: Topic or old slug to migrate from @@ -297,15 +260,13 @@ def merge(self, other: Union['Topic', str]) -> None: # links for link in TopicLinkSetHelper.find({"$or": [{"toTopic": other_slug}, {"fromTopic": other_slug}]}): - if link.toTopic == getattr(link, 'fromTopic', - None): # self-link where fromTopic and toTopic were equal before slug was changed + if link.toTopic == getattr(link, 'fromTopic', None): # self-link where fromTopic and toTopic were equal before slug was changed link.fromTopic = self.slug link.toTopic = self.slug else: attr = 'toTopic' if link.toTopic == other_slug else 'fromTopic' setattr(link, attr, self.slug) - if getattr(link, 'fromTopic', - None) == link.toTopic: # self-link where fromTopic and toTopic are equal AFTER slug was changed + if getattr(link, 'fromTopic', None) == link.toTopic: # self-link where fromTopic and toTopic are equal AFTER slug was changed link.delete() continue try: @@ -314,19 +275,16 @@ def merge(self, other: Union['Topic', str]) -> None: link.delete() except AssertionError as e: link.delete() - logger.warning( - 'While merging {} into {}, link assertion failed with message "{}"'.format(other_slug, self.slug, - str(e))) + logger.warning('While merging {} into {}, link assertion failed with message "{}"'.format(other_slug, self.slug, str(e))) # source sheets - db.sheets.update_many({'topics.slug': other_slug}, {"$set": {'topics.$[element].slug': self.slug}}, - array_filters=[{"element.slug": other_slug}]) + db.sheets.update_many({'topics.slug': other_slug}, {"$set": {'topics.$[element].slug': self.slug}}, array_filters=[{"element.slug": other_slug}]) # indexes for index in IndexSet({"authors": other_slug}): index.authors = [self.slug if author_slug == other_slug else author_slug for author_slug in index.authors] props = index._saveable_attrs() - db.index.replace_one({"title": index.title}, props) + db.index.replace_one({"title":index.title}, props) if isinstance(other, Topic): # titles @@ -341,12 +299,7 @@ def merge(self, other: Union['Topic', str]) -> None: temp_dict = getattr(self, dict_attr, {}) for k, v in 
getattr(other, dict_attr, {}).items(): if k in temp_dict: - logger.warning( - 'Key {} with value {} already exists in {} for topic {}. Current value is {}'.format(k, v, - dict_attr, - self.slug, - temp_dict[ - k])) + logger.warning('Key {} with value {} already exists in {} for topic {}. Current value is {}'.format(k, v, dict_attr, self.slug, temp_dict[k])) continue temp_dict[k] = v if len(temp_dict) > 0: @@ -387,9 +340,7 @@ def contents(self, **kwargs): d = {'slug': self.slug} if mini else super(Topic, self).contents(**kwargs) d['primaryTitle'] = {} for lang in ('en', 'he'): - d['primaryTitle'][lang] = self.get_primary_title(lang=lang, - with_disambiguation=kwargs.get('with_disambiguation', - True)) + d['primaryTitle'][lang] = self.get_primary_title(lang=lang, with_disambiguation=kwargs.get('with_disambiguation', True)) if not kwargs.get("with_html"): for k, v in d.get("description", {}).items(): d["description"][k] = re.sub("<[^>]+>", "", v or "") @@ -452,7 +403,6 @@ class PersonTopic(Topic): """ Represents a topic which is a person. Not necessarily an author of a book. """ - @staticmethod def get_person_by_key(key: str): """ @@ -468,7 +418,7 @@ def annotate_place(self, d): if place and heKey not in properties: value, dataSource = place['value'], place['dataSource'] place_obj = Place().load({"key": value}) - if place_obj: + if place_obj: name = place_obj.primary_name('he') d['properties'][heKey] = {'value': name, 'dataSource': dataSource} return d @@ -491,7 +441,7 @@ def contents(self, **kwargs): } } return d - + # A person may have an era, a generation, or a specific birth and death years, which each may be approximate. # They may also have none of these... def _most_accurate_period(self, time_period_class: Type[TimePeriod]) -> Optional[LifePeriod]: @@ -531,7 +481,6 @@ def most_accurate_life_period(self): ''' return self._most_accurate_period(LifePeriod) - class AuthorTopic(PersonTopic): """ Represents a topic which is an author of a book. 
Can be used on the `authors` field of `Index` @@ -554,8 +503,7 @@ def _authors_indexes_fill_category(self, indexes, path, include_dependant): path_end_set = {tuple(i.categories[len(path):]) for i in indexes} for index_in_path in indexes_in_path: if tuple(index_in_path.categories[len(path):]) in path_end_set: - if index_in_path.title not in temp_index_title_set and self.slug not in set( - getattr(index_in_path, 'authors', [])): + if index_in_path.title not in temp_index_title_set and self.slug not in set(getattr(index_in_path, 'authors', [])): return False return True @@ -571,22 +519,20 @@ def aggregate_authors_indexes_by_category(self): from collections import defaultdict def index_is_commentary(index): - return getattr(index, 'base_text_titles', None) is not None and len(index.base_text_titles) > 0 and getattr( - index, 'collective_title', None) is not None + return getattr(index, 'base_text_titles', None) is not None and len(index.base_text_titles) > 0 and getattr(index, 'collective_title', None) is not None indexes = self.get_authored_indexes() - - index_or_cat_list = [] # [(index_or_cat, collective_title_term, base_category)] - cat_aggregator = defaultdict( - lambda: defaultdict(list)) # of shape {(collective_title, top_cat): {(icat, category): [index_object]}} + + index_or_cat_list = [] # [(index_or_cat, collective_title_term, base_category)] + cat_aggregator = defaultdict(lambda: defaultdict(list)) # of shape {(collective_title, top_cat): {(icat, category): [index_object]}} MAX_ICAT_FROM_END_TO_CONSIDER = 2 for index in indexes: is_comm = index_is_commentary(index) base = library.get_index(index.base_text_titles[0]) if is_comm else index collective_title = index.collective_title if is_comm else None - base_cat_path = tuple(base.categories[:-MAX_ICAT_FROM_END_TO_CONSIDER + 1]) + base_cat_path = tuple(base.categories[:-MAX_ICAT_FROM_END_TO_CONSIDER+1]) for icat in range(len(base.categories) - MAX_ICAT_FROM_END_TO_CONSIDER, len(base.categories)): - cat_aggregator[(collective_title, base_cat_path)][(icat, tuple(base.categories[:icat + 1]))] += [index] + cat_aggregator[(collective_title, base_cat_path)][(icat, tuple(base.categories[:icat+1]))] += [index] for (collective_title, _), cat_choice_dict in cat_aggregator.items(): cat_choices_sorted = sorted(cat_choice_dict.items(), key=lambda x: (len(x[1]), x[0][0]), reverse=True) (_, best_base_cat_path), temp_indexes = cat_choices_sorted[0] @@ -595,7 +541,7 @@ def index_is_commentary(index): continue if best_base_cat_path == ('Talmud', 'Bavli'): best_base_cat_path = ('Talmud',) # hard-coded to get 'Rashi on Talmud' instead of 'Rashi on Bavli' - + base_category = Category().load({"path": list(best_base_cat_path)}) if collective_title is None: index_category = base_category @@ -603,9 +549,7 @@ def index_is_commentary(index): else: index_category = Category.get_shared_category(temp_indexes) collective_title_term = Term().load({"name": collective_title}) - if index_category is None or not self._authors_indexes_fill_category(temp_indexes, index_category.path, - collective_title is not None) or ( - collective_title is None and self._category_matches_author(index_category)): + if index_category is None or not self._authors_indexes_fill_category(temp_indexes, index_category.path, collective_title is not None) or (collective_title is None and self._category_matches_author(index_category)): for temp_index in temp_indexes: index_or_cat_list += [(temp_index, None, None)] continue @@ -627,9 +571,9 @@ def get_aggregated_urls_for_authors_indexes(self) -> 
list: en_desc = getattr(index_or_cat, 'enShortDesc', None) he_desc = getattr(index_or_cat, 'heShortDesc', None) if isinstance(index_or_cat, Index): - unique_urls.append({"url": f'/{index_or_cat.title.replace(" ", "_")}', - "title": {"en": index_or_cat.get_title('en'), "he": index_or_cat.get_title('he')}, - "description": {"en": en_desc, "he": he_desc}}) + unique_urls.append({"url":f'/{index_or_cat.title.replace(" ", "_")}', + "title": {"en": index_or_cat.get_title('en'), "he": index_or_cat.get_title('he')}, + "description":{"en": en_desc, "he": he_desc}}) else: if collective_title_term is None: cat_term = Term().load({"name": index_or_cat.sharedTitle}) @@ -641,7 +585,7 @@ def get_aggregated_urls_for_authors_indexes(self) -> list: he_text = f'{collective_title_term.get_primary_title("he")} על {base_category_term.get_primary_title("he")}' unique_urls.append({"url": f'/texts/{"/".join(index_or_cat.path)}', "title": {"en": en_text, "he": he_text}, - "description": {"en": en_desc, "he": he_desc}}) + "description":{"en": en_desc, "he": he_desc}}) return unique_urls @staticmethod @@ -657,10 +601,9 @@ def __init__(self, query=None, *args, **kwargs): if self.recordClass != Topic: # include class name of recordClass + any class names of subclasses query = query or {} - subclass_names = [self.recordClass.__name__] + [klass.__name__ for klass in - self.recordClass.all_subclasses()] + subclass_names = [self.recordClass.__name__] + [klass.__name__ for klass in self.recordClass.all_subclasses()] query['subclass'] = {"$in": [self.recordClass.reverse_subclass_map[name] for name in subclass_names]} - + super().__init__(query=query, *args, **kwargs) @staticmethod @@ -700,8 +643,7 @@ class TopicLinkHelper(object): ] optional_attrs = [ 'generatedBy', - 'order', - # dict with some data on how to sort this link. can have key 'custom_order' which should trump other data + 'order', # dict with some data on how to sort this link. 
can have key 'custom_order' which should trump other data 'isJudgementCall', ] generated_by_sheets = "sheet-topic-aggregator" @@ -753,9 +695,8 @@ def _validate(self): TopicDataSource.validate_slug_exists(self.dataSource) # check for duplicates - duplicate = IntraTopicLink().load( - {"linkType": self.linkType, "fromTopic": self.fromTopic, "toTopic": self.toTopic, - "class": getattr(self, 'class'), "_id": {"$ne": getattr(self, "_id", None)}}) + duplicate = IntraTopicLink().load({"linkType": self.linkType, "fromTopic": self.fromTopic, "toTopic": self.toTopic, + "class": getattr(self, 'class'), "_id": {"$ne": getattr(self, "_id", None)}}) if duplicate is not None: raise DuplicateRecordError( "Duplicate intra topic link for linkType '{}', fromTopic '{}', toTopic '{}'".format( @@ -763,9 +704,8 @@ def _validate(self): link_type = TopicLinkType.init(self.linkType, 0) if link_type.slug == link_type.inverseSlug: - duplicate_inverse = IntraTopicLink().load( - {"linkType": self.linkType, "toTopic": self.fromTopic, "fromTopic": self.toTopic, - "class": getattr(self, 'class'), "_id": {"$ne": getattr(self, "_id", None)}}) + duplicate_inverse = IntraTopicLink().load({"linkType": self.linkType, "toTopic": self.fromTopic, "fromTopic": self.toTopic, + "class": getattr(self, 'class'), "_id": {"$ne": getattr(self, "_id", None)}}) if duplicate_inverse is not None: raise DuplicateRecordError( "Duplicate intra topic link in the inverse direction of the symmetric linkType '{}', fromTopic '{}', toTopic '{}' exists".format( @@ -775,21 +715,16 @@ def _validate(self): from_topic = Topic.init(self.fromTopic) to_topic = Topic.init(self.toTopic) if getattr(link_type, 'validFrom', False): - assert from_topic.has_types( - set(link_type.validFrom)), "from topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( - self.fromTopic, ', '.join(link_type.validFrom), self.linkType, ', '.join(from_topic.get_types())) + assert from_topic.has_types(set(link_type.validFrom)), "from topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.fromTopic, ', '.join(link_type.validFrom), self.linkType, ', '.join(from_topic.get_types())) if getattr(link_type, 'validTo', False): - assert to_topic.has_types( - set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( - self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) + assert to_topic.has_types(set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. 
Instead, types are '{}'".format(self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) # assert this link doesn't create circular paths (in is_a link type) # should consider this test also for other non-symmetric link types such as child-of if self.linkType == TopicLinkType.isa_type: to_topic = Topic.init(self.toTopic) ancestors = to_topic.get_types() - assert self.fromTopic not in ancestors, "{} is an is-a ancestor of {} creating an illogical circle in the topics graph, here are {} ancestors: {}".format( - self.fromTopic, self.toTopic, self.toTopic, ancestors) + assert self.fromTopic not in ancestors, "{} is an is-a ancestor of {} creating an illogical circle in the topics graph, here are {} ancestors: {}".format(self.fromTopic, self.toTopic, self.toTopic, ancestors) def contents(self, **kwargs): d = super(IntraTopicLink, self).contents(**kwargs) @@ -879,15 +814,12 @@ def _validate(self): to_topic = Topic.init(self.toTopic) link_type = TopicLinkType.init(self.linkType, 0) if getattr(link_type, 'validTo', False): - assert to_topic.has_types( - set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format( - self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) - + assert to_topic.has_types(set(link_type.validTo)), "to topic '{}' does not have valid types '{}' for link type '{}'. Instead, types are '{}'".format(self.toTopic, ', '.join(link_type.validTo), self.linkType, ', '.join(to_topic.get_types())) + def _pre_save(self): if getattr(self, "_id", None) is None: # check for duplicates - query = {"linkType": self.linkType, "ref": self.ref, "toTopic": self.toTopic, - "dataSource": getattr(self, 'dataSource', {"$exists": False}), "class": getattr(self, 'class')} + query = {"linkType": self.linkType, "ref": self.ref, "toTopic": self.toTopic, "dataSource": getattr(self, 'dataSource', {"$exists": False}), "class": getattr(self, 'class')} if getattr(self, "charLevelData", None): query["charLevelData.startChar"] = self.charLevelData['startChar'] query["charLevelData.endChar"] = self.charLevelData['endChar'] @@ -896,9 +828,8 @@ def _pre_save(self): duplicate = RefTopicLink().load(query) if duplicate is not None: - raise DuplicateRecordError( - "Duplicate ref topic link for linkType '{}', ref '{}', toTopic '{}', dataSource '{}'".format( - self.linkType, self.ref, self.toTopic, getattr(self, 'dataSource', 'N/A'))) + raise DuplicateRecordError("Duplicate ref topic link for linkType '{}', ref '{}', toTopic '{}', dataSource '{}'".format( + self.linkType, self.ref, self.toTopic, getattr(self, 'dataSource', 'N/A'))) def contents(self, **kwargs): d = super(RefTopicLink, self).contents(**kwargs) @@ -907,7 +838,6 @@ def contents(self, **kwargs): d.pop('toTopic') return d - class TopicLinkSetHelper(object): @staticmethod def init_query(query, link_class): @@ -919,8 +849,7 @@ def init_query(query, link_class): def find(query=None, page=0, limit=0, sort=[("_id", 1)], proj=None, record_kwargs=None): from sefaria.system.database import db record_kwargs = record_kwargs or {} - raw_records = getattr(db, TopicLinkHelper.collection).find(query, proj).sort(sort).skip(page * limit).limit( - limit) + raw_records = getattr(db, TopicLinkHelper.collection).find(query, proj).sort(sort).skip(page * limit).limit(limit) return [TopicLinkHelper.init_by_class(r, **record_kwargs) for r in raw_records] @@ -1020,13 +949,11 @@ def process_index_title_change_in_topic_links(indx, **kwargs): except 
InputError: logger.warning("Failed to convert ref data from: {} to {}".format(kwargs['old'], kwargs['new'])) - def process_index_delete_in_topic_links(indx, **kwargs): from sefaria.model.text import prepare_index_regex_for_dependency_process pattern = prepare_index_regex_for_dependency_process(indx) RefTopicLinkSet({"ref": {"$regex": pattern}}).delete() - def process_topic_delete(topic): RefTopicLinkSet({"toTopic": topic.slug}).delete() IntraTopicLinkSet({"$or": [{"toTopic": topic.slug}, {"fromTopic": topic.slug}]}).delete() @@ -1034,21 +961,18 @@ def process_topic_delete(topic): sheet["topics"] = [t for t in sheet["topics"] if t["slug"] != topic.slug] db.sheets.save(sheet) - def process_topic_description_change(topic, **kwargs): """ Upon topic description change, get rid of old markdown links and save any new ones """ # need to delete currently existing links but dont want to delete link if its still in the description # so load up a dictionary of relevant data -> link - IntraTopicLinkSet( - {"toTopic": topic.slug, "linkType": "related-to", "dataSource": "learning-team-editing-tool"}).delete() + IntraTopicLinkSet({"toTopic": topic.slug, "linkType": "related-to", "dataSource": "learning-team-editing-tool"}).delete() refLinkType = 'popular-writing-of' if getattr(topic, 'subclass', '') == 'author' else 'about' - RefTopicLinkSet( - {"toTopic": topic.slug, "linkType": refLinkType, "dataSource": "learning-team-editing-tool"}).delete() + RefTopicLinkSet({"toTopic": topic.slug, "linkType": refLinkType, "dataSource": "learning-team-editing-tool"}).delete() markdown_links = set() - for lang, val in kwargs['new'].items(): # put each link in a set so we dont try to create duplicate of same link + for lang, val in kwargs['new'].items(): # put each link in a set so we dont try to create duplicate of same link for m in re.findall('\[.*?\]\((.*?)\)', val): markdown_links.add(m) @@ -1068,7 +992,12 @@ def process_topic_description_change(topic, **kwargs): ref = Ref(markdown_link).normal() except InputError as e: continue - ref_topic_dict = {"toTopic": topic.slug, "dataSource": "learning-team-editing-tool", - "linkType": refLinkType, + ref_topic_dict = {"toTopic": topic.slug, "dataSource": "learning-team-editing-tool", "linkType": refLinkType, 'ref': ref} RefTopicLink(ref_topic_dict).save() + + + + + + From 90b45085b13b4895e4e24a07fd438dd99e538f4a Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 22 Nov 2023 12:19:04 +0200 Subject: [PATCH 48/52] chore(Backend topic images): Restore validation code with proper format --- sefaria/model/topic.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/sefaria/model/topic.py b/sefaria/model/topic.py index 0a211f7867..2304a7a5b4 100644 --- a/sefaria/model/topic.py +++ b/sefaria/model/topic.py @@ -5,6 +5,7 @@ from .category import Category from sefaria.system.exceptions import InputError, DuplicateRecordError from sefaria.model.timeperiod import TimePeriod, LifePeriod +from sefaria.system.validators import validate_url from sefaria.model.portal import Portal from sefaria.system.database import db import structlog, bleach @@ -49,6 +50,31 @@ class Topic(abst.SluggedAbstractMongoRecord, AbstractTitledObject): 'image', "portal_slug", # slug to relevant Portal object ] + + attr_schemas = { + "image": { + "image_uri": { + "type": "string", + "required": True, + "regex": "^https://storage\.googleapis\.com/img\.sefaria\.org/topics/.*?" 
+ }, + "image_caption": { + "type": "dict", + "required": True, + "schema": { + "en": { + "type": "string", + "required": True + }, + "he": { + "type": "string", + "required": True + } + } + } + } + } + ROOT = "Main Menu" # the root of topic TOC is not a topic, so this is a fake slug. we know it's fake because it's not in normal form # this constant is helpful in the topic editor tool functions in this file @@ -74,6 +100,9 @@ def _validate(self): assert self.subclass in self.subclass_map, f"Field `subclass` set to {self.subclass} which is not one of the valid subclass keys in `Topic.subclass_map`. Valid keys are {', '.join(self.subclass_map.keys())}" if getattr(self, 'portal_slug', None): Portal.validate_slug_exists(self.portal_slug) + if getattr(self, "image", False): + img_url = self.image.get("image_uri") + if img_url: validate_url(img_url) def _normalize(self): super()._normalize() From 1fdc6ab858d8fd3cb4b5ddd8fd77af88cb2c28af Mon Sep 17 00:00:00 2001 From: saengel Date: Wed, 22 Nov 2023 12:22:52 +0200 Subject: [PATCH 49/52] fix(Backend topic images): Create new migration subdirectory, add script for migration --- scripts/migrations/add_topic_images.py | 40 ++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 scripts/migrations/add_topic_images.py diff --git a/scripts/migrations/add_topic_images.py b/scripts/migrations/add_topic_images.py new file mode 100644 index 0000000000..ed42ba96dd --- /dev/null +++ b/scripts/migrations/add_topic_images.py @@ -0,0 +1,40 @@ +import django + +django.setup() + +from sefaria.helper.topic import add_image_to_topic + +## Adding images + +hardcodedTopicImagesMap = { + 'rosh-hashanah': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/rosh-hashanah.jpeg', + 'enCaption': 'Rosh Hashanah, Arthur Szyk (1894-1951) Tempera and ink on paper. New Canaan, 1948. Collection of Yeshiva University Museum. Gift of Charles Frost', + 'heCaption': 'ראש השנה, ארתור שיק, ארה״ב 1948. אוסף ישיבה יוניברסיטי'}, + + 'yom-kippur': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/yom-kippur.jpeg', + 'enCaption': 'Micrography of Jonah being swallowed by the fish. Germany, 1300-1500, The British Library', + 'heCaption': 'מיקרוגרפיה של יונה בבטן הדג, מתוך ספר יונה ההפטרה של יום כיפור, 1300-1500'}, + + 'the-four-species': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/the-four-species.jpg', + 'enCaption': 'Etrog container, K B, late 19th century, Germany. The Jewish Museum, Gift of Dr. Harry G. Friedman', + 'heCaption': 'תיבת אתרוג, סוף המאה ה19, גרמניה. המוזיאון היהודי בניו יורק, מתנת דר. הארי ג. פרידמן '}, + + 'sukkot': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/sukkot.jpg', + 'enCaption': 'Detail of a painting of a sukkah. Image taken from f. 316v of Forli Siddur. 1383, Italian rite. The British Library', + 'heCaption': 'פרט ציור של סוכה עם שולחן פרוש ושלוש דמויות. דימוי מתוך סידור פורלי, 1383 איטליה'}, + + 'simchat-torah': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/simchat-torah.jpg', + 'enCaption': 'Rosh Hashanah postcard: Hakafot, Haim Yisroel Goldberg (1888-1943) Publisher: Williamsburg Post Card Co. Germany, ca. 1915 Collection of Yeshiva University Museum', + 'heCaption': 'גלויה לראש השנה: הקפות, חיים גולדברג, גרמניה 1915, אוסף ישיבה יוניברסיטי'}, + + 'shabbat': {'image_uri': 'https://storage.googleapis.com/img.sefaria.org/topics/shabbat.jpg', + 'enCaption': 'Friday Evening, Isidor Kaufmann, Austria c. 1920. The Jewish Museum, Gift of Mr. 
and Mrs. M. R. Schweitzer', + 'heCaption': 'שישי בערב, איזידור קאופמן, וינה 1920. המוזיאון היהודי בניו יורק, מתנת מר וגברת מ.ר. שוויצר'}, + +} + +for topic in hardcodedTopicImagesMap: + add_image_to_topic(topic, + image_uri=hardcodedTopicImagesMap[topic]["image_uri"], + en_caption=hardcodedTopicImagesMap[topic]["enCaption"], + he_caption=hardcodedTopicImagesMap[topic]["heCaption"]) \ No newline at end of file From 4f77a137a84c5be8d2aaa3fc2f828778f89abb1a Mon Sep 17 00:00:00 2001 From: nsantacruz Date: Thu, 23 Nov 2023 12:58:27 +0200 Subject: [PATCH 50/52] fix(portal): Pass caption prop properly using the new ImageWithCaption component --- static/js/NavSidebar.jsx | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/static/js/NavSidebar.jsx b/static/js/NavSidebar.jsx index baeb12a952..69976798fe 100644 --- a/static/js/NavSidebar.jsx +++ b/static/js/NavSidebar.jsx @@ -1,11 +1,10 @@ import React, { useState, useEffect } from 'react'; import classNames from 'classnames'; import Sefaria from './sefaria/sefaria'; -import {AppStoreButton, DonateLink, EnglishText, HebrewText} from './Misc' +import {AppStoreButton, DonateLink, EnglishText, HebrewText, ImageWithCaption} from './Misc' import {NewsletterSignUpForm} from "./NewsletterSignUpForm"; import {InterfaceText, ProfileListing, Dropdown} from './Misc'; import { Promotions } from './Promotions' -import {TopicImage} from './TopicPage' const NavSidebar = ({modules}) => { return
@@ -803,7 +802,7 @@ const PortalAbout = ({title, description, image_uri, image_caption}) => {
- +
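Together with the import change above (TopicImage out, ImageWithCaption in), this hunk points PortalAbout's image at the shared ImageWithCaption component from Misc.jsx. A minimal sketch of the resulting shape — the wrapper markup, the conditional render, and the exact prop wiring below are assumptions inferred from the PortalAbout and ImageWithCaption signatures shown in this series, not the file's actual lines:

    import React from 'react';
    import { ImageWithCaption } from './Misc';

    // Illustrative only: PortalAbout forwards its image fields to the shared component.
    // Title/description rendering is omitted; the relevant change is the image element.
    const PortalAbout = ({title, description, image_uri, image_caption}) => (
        <div>
            {image_uri && <ImageWithCaption photoLink={image_uri} caption={image_caption} />}
        </div>
    );
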
From 6cd9f89a21b32d5745180bd19255037aa3a300bb Mon Sep 17 00:00:00 2001 From: nsantacruz Date: Thu, 23 Nov 2023 13:13:42 +0200 Subject: [PATCH 51/52] fix(linker): allow possibility of caption not existing for portals --- static/js/Misc.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/js/Misc.jsx b/static/js/Misc.jsx index acb8c88440..4e591c3ee8 100644 --- a/static/js/Misc.jsx +++ b/static/js/Misc.jsx @@ -3175,7 +3175,7 @@ const ImageWithCaption = ({photoLink, caption }) => {
- +
); } From a2fa2683d1c496f00b59389d6f898002f3146f6d Mon Sep 17 00:00:00 2001 From: yonadavGit Date: Sun, 26 Nov 2023 13:59:13 +0200 Subject: [PATCH 52/52] chore(ProfilePicIcon): aligned default non-default profile pic button --- static/css/s2.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/static/css/s2.css b/static/css/s2.css index ef7475bd52..490ccb31d2 100644 --- a/static/css/s2.css +++ b/static/css/s2.css @@ -7083,6 +7083,7 @@ But not to use a display block directive that might break continuous mode for ot justify-content: center; align-items: center; flex-direction: column; + margin-bottom: 4px; } .profile-page { background-color: var(--lightest-grey); @@ -10644,7 +10645,6 @@ cursor: pointer; color: white; font-size: 75px; font-family: "Roboto", "Helvetica Neue", "Helvetica", sans-serif; - margin-bottom: 3px; } .default-profile-img.invisible { visibility: hidden;
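
The one-line Misc.jsx change in PATCH 51 above serves portals that supply an image without a caption. A minimal sketch of that defensive shape — the markup and the English-only fallback are illustrative assumptions; only the ({photoLink, caption}) signature comes from the hunk header:

    import React from 'react';

    // Illustrative only: render the photo unconditionally, but fall back to an empty
    // caption object so a portal image with no caption renders cleanly instead of
    // reading properties off undefined.
    const ImageWithCaption = ({photoLink, caption}) => {
        const safeCaption = caption || {};
        return (
            <figure>
                <img src={photoLink} alt={safeCaption.en || ""} />
                {safeCaption.en ? <figcaption>{safeCaption.en}</figcaption> : null}
            </figure>
        );
    };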