From da8ee9b9c3d5be227030535605d5833d156b6807 Mon Sep 17 00:00:00 2001
From: Ilya Y <75578537+ilya-bkv@users.noreply.github.com>
Date: Thu, 19 Oct 2023 17:54:38 +0300
Subject: [PATCH 01/27] signIn/getSession optimization (#95)
Co-authored-by: Igor Lobanov Вышел Путин на крыльцо, Потеряв вконец лицо. Об опасности конца Говорил с того крыльца, Про предателей, про бунт, О вреде военных хунт, Про гражданскую войну, Про несчастную страну, Положив на музыкантов Вот за это всю вину. К сожаленью президент, Запилив такой контент, Не сдержавшись в выраженьях, Упустил такой момент: Чтобы кресло сохранить, Нужно меньше говорить, Как тебя на этом кресле Не проблемно заменить. Автор неизвестен В России вещи, о которых трубят из каждого утюга, все равно происходят неожиданно. Долго говорили, насколько невероятна война с Украиной, а это случилось. Говорили о том, что частные армии опасны для государственной бюрократии, — начался военный мятеж. Шутили «будем бомбить Воронеж» (не смотри, что в анекдоте) — и это тоже случилось. Говорили, что рано или поздно люди из системы начнут жрать друг друга, — и вот вчерашний герой Пригожин уже вымарывается из российской истории. 23 июня Евгений Пригожин начал вооруженный мятеж после того, как министр обороны Сергей Шойгу потребовал, чтобы наемники ЧВК «Вагнер» подписали контракты с Минобороны до 1 июля. То есть попытался лишить Пригожина его кормовой и силовой базы в виде частной армии. По версии Пригожина, Минобороны нанесло ракетный удар по лагерю «Вагнера», а также направило спецназ для захвата его самого. Однако, как выяснилось, о начавшемся отходе «вагнеров» из захваченного Бахмута и готовящемся мятеже уже 22 июня знала ФСБ из официального письма заместителя Пригожина в ЧВК Андрея Трошева. В США и вовсе заявили, что наблюдали за подготовкой мятежа две недели. О том же сообщила немецкая разведка. И, наконец, провалившееся задержание Пригожина должно было состояться не в лагере наемников, а в Санкт-Петербурге. Военный мятеж предварялся обращением Пригожина в телеграм, в котором он открыл общественности секрет Полишинеля. 
В частности, обвинил руководство Минобороны в развале армии, рассказал, что захват Украины нужен был для распила российскими олигархами бизнеса на новых территориях, как это было на Донбассе, заявил, что пора покончить с обманом и коррупцией в стране, и потребовал выдать ему министра обороны Шойгу и главу генштаба Герасимова. Шойгу спешно свалил из Ростова. Сам город и военные объекты Ростовской области были заняты «Вагнером». Нужно ли говорить, что все полицейские разбежались, решив, что на этом их полномочия — всё. Такой серьезный митинг разогнать шокерами и дубинками решительно нельзя. В Кремле едва успевали подносить и опорожнять чемоданчики. Ведь Путин не испытывал подобных стрессов со времен Болотной площади, когда реально испугался потери власти, после чего стал превращать правоохранительную систему в политическую полицию, создал Росгвардию и «заболел цифровизацией» как инструментом тотальной слежки за гражданами. Гражданское общество с белыми ленточками подавили, но беда пришла со стороны людей с шевронами «Наш бизнес — смерть, и бизнес идет хорошо». Страшно, очень страшно. Путин записал обращение, в котором назвал наемников предателями, обещал неминуемое наказание (которое таки минуло) и вспомнил 1917 год. Услышав про 1917 год, все, кроме «болота», в течение суток ждали досрочного прекращения полномочий президента. Правящая элита, включая Путина, покинула Москву. Косплеить украинское руководство и записывать ролики на Красной площади не стали. В Москве остался только Володин. Когда все утихло, он решил повысить свой аппаратный вес и призвал наказать бежавших. То есть почти всю верхушку страны. А в ней, между прочим, олигархи путинской волны, друзья детства, кооператив «Озеро» и всё, что навевает теплые воспоминания из прошлого. Отвечая на обращение Путина, Пригожин неосторожно заявил, что президент ошибается, и мятеж — это не мятеж, а «марш справедливости». 
При этом глава ЧВК требовал, чтобы никто не сопротивлялся колоннам наемников, движущимся на Москву, а любой, кто встанет на пути, будет уничтожен. Потому что никто не встанет на пути у справедливости. После некоторой фрустрации ФСБ очухалась и забегала по военкоматам, собирая информацию о женах и родственниках «вагнеров». Под Москвой начали разрывать экскаваторами дороги и выставлять грузовики с песком. Кадыров заверил Путина в своей преданности и отправил в направлении Ростова батальон «Ахмат», который в очередной раз весьма благоразумно не доехал до точки соприкосновения. Вечером 24 июня, когда колонна «Вагнера» была в 200 км от Москвы, Пригожин решил развернуть колонну и вернуться в полевые лагеря во избежание кровопролития (умолчав о куче перебитой российской авиации с РЭБ и ее экипажах). Ответственность за срыв мятежа взял на себя Лукашенко и сымитировал переговоры с Пригожиным, передав тому предложения Путина, который не осмелился лично ответить на звонок мятежника. Лукашенко с радостью вписался во что-то более легитимирующее его шаткую власть, чем осмотр «обосранных» коров в колхозах. Позже Песков сообщил, что Пригожин уезжает в Беларусь, а те «вагнера», которые на участвовали в мятеже, могут заключить контракты с Минобороны. В Беларуси был раскинут лагерь на 8 тысяч человек. У Путина от избытка адреналина развязался язык. Он провел открытое совещание Совбеза, записывал обращения, рассказывал о попытке начать гражданскую войну, клеймил предателей, благодарил всех, кто не разбежался. И, наконец, сдал все пароли и явки, заявив, что за год государство потратило на «Вагнер» и Пригожина 276 млрд рублей. Позже пропагандист Дмитрий Киселев назвал цифру в 858 млрд, которые Пригожин получил через холдинг «Конкорд». Все бы ничего, ведь активная часть гражданского общества обо всем и так знала. И о Сирии, и об Африке, и об Украине. 
Но Путин забылся и разоткровенничался перед своим ядерным электоратом, тем самым «болотом», которое смотрит телик, мало осведомлено о ЧВК, верит в сильного президента и патриотическую сплоченность. А теперь им рассказали, что государство финансирует через левые схемы частные военизированные формирования, которые ставят страну на грань гражданской войны. Президент теперь не находится над схваткой, а является ее частью, и спасает его Лукашенко, который всеми силами демонстрирует, что его яйца крепче, чем картофель и покрышка БелАЗа. Главу Росгвардии Золотова наградили за защиту Москвы, которая не состоялась. А самой Росгвардии обещали выдать танки и прочую тяжелую технику, которая теперь не отправится на фронт. Если будет выдана. Видимо, ожидают повторного марша государственных и полугосударственных военных на Москву. Так феодализм оформился и в военной сфере: армия против Украины, другая армия против этой армии, региональные армии на случай войны с федералами и частные армии на случай войны с конкурирующими корпорациями за активы. Не удивительно, что Пригожина возмутило, что его хотят лишить своей армии, когда у всех уважаемых людей она есть. Уголовное дело против Пригожина было юридически неграмотно прекращено, несмотря на убитых «вагнерами» летчиков, которых Путин почтил минутой молчания, выступая на крыльце Грановитой палаты Кремля перед сотрудниками ФСО и военным руководством. В частности, 28 июня сообщили, что арестован генерал Суровикин, лоббист «Вагнера» в Министерстве обороны, несмотря на то что осудил мятеж после его начала, записав соответствующее видеообращение при неустановленных обстоятельствах. Правозащитник Ольга Романова рассказала, что в СИЗО «Лефортово» была принята и передана задержанному открытка, отправленная на имя Суровикина С. В. Предположительно, сейчас Суровикин находится под другой мерой пресечения — запретом на совершение определенных действий. 
Неизвестна судьба генерала Мизинцева, который до увольнения из Минобороны обеспечивал серые поставки «вагнерам» боеприпасов во время войны с Украиной, за что был уволен и немедленно трудоустроен заместителем в ЧВК «Вагнер». В течение недели после мятежа начались чистки в Минобороны. Бизнес-империю Пригожина начали рушить, включая его силовые, медийные и чисто коммерческие ресурсы. Его репутацию тоже уничтожают. Пропагандисты на федеральных каналах развернулись на 180 градусов, клеймят предателя и рассказывают от том, насколько преувеличена роль «Вагнера» на фронте. И, конечно же, показывают «глубинному народу» материалы обысков во дворце Пригожина с найденными в нем наградным оружием, париками для маскировки и, по неподтвержденным данным, костюмом Папы Римского. На протяжении 2023 года в военной и чекистской бюрократии устоялась концепция того, что зарвавшегося Пригожина (выскочку, человека не из системы, с чрезмерными политическими амбициями) готовят на заклание. Слишком быстрый рост популярности при отсутствии аппаратного веса. Или, если короче, «кто он вообще такой, чтобы так борзеть?». Минобороны ограничивало снабжение ЧВК боеприпасами, минировало пути отхода «Вагнера» из Бахмута и принуждало наемников заключить контракты с Минобороны. То есть пыталось лишить Пригожина его собственной пирамиды, на вершине которой он таки имел аппаратный вес. Но этот аппарат слишком обособился от военной бюрократии. Нарушил пресловутую монополию государства на легальное насилие. Опасно. Обнулять «Вагнер» Шойгу начал еще во время сирийской кампании, где Россия помогала Башару Асаду сохранить свою диктаторскую власть. Во времена этой же кампании случилось уничтожение 200 «вагнеров», шедших на захват нефтеперерабатывающего завода. На запрос США: «Это ваши?» — Минобороны ответило: «Не, не наши». Американцы пожали плечами и нанесли по колонне авиаудар, полностью очистивший ландшафт от всей имеющейся на нем фауны. 
Раз Два Три Понимая, куда все движется, длительное время Пригожин как когда-то генералиссимус Валленштейн (тоже владевший частной армией) находился в полевых лагерях, откуда критиковал государственную армию, заверяя императора в том, что будет воевать в его интересах, но по своему усмотрению. Как и для Валленштейна, для Пригожина частная армия являлась единственным гарантом выживания в борьбе с тяжеловесами из государственной бюрократии — Шойгу и Герасимовым. Те не забыли оскорблений Пригожина и долго низводили численный состав «Вагнера» к минимуму, перекрыв доступ к вербовке зеков, держа наемников на передней линии фронта для перемалывания их руками ВСУ и, наконец, требуя перейти на контракты с Минобороны. А что насчет сообщников, единомышленников или по крайней мере сочувствующих Пригожину в государственной бюрократии? Можно говорить о ситуативном содействии отдельных чиновников Пригожину, но не о спланированном мятеже с целью смены высших должностных лиц, включая президента. Поскольку государство авторитарное, кажется, что у него единый центр принятия решений. Эта иллюзия заставляет думать, что все происходящее — это часть некоего плана. Право народа на восстание. Можно ли защищать демократию силой? Как Пригожин вербовал заключенных на войну. Репортаж из колонии о приезде основателя ЧВК «Вагнер» «Вы — пушечное мясо». Почему российские власти творят всякий треш? «Они хотят вырваться из русской тюрьмы». Ольга Романова о заключенных на фронте и новых законах после мятежа Пригожина «Я не могу желать поражения русской армии». Почему националисты и нацболы не выступают против войны в Украине? Цитата любопытно смещает эмбед А текст после цитаты пишется здесь Вышел Путин на крыльцо, Потеряв вконец лицо. Об опасности конца Говорил с того крыльца, Про предателей, про бунт, О вреде военных хунт, Про гражданскую войну, Про несчастную страну, Положив на музыкантов Вот за это всю вину. 
К сожаленью президент, Запилив такой контент, Не сдержавшись в выраженьях, Упустил такой момент: Чтобы кресло сохранить, Нужно меньше говорить, Как тебя на этом кресле Не проблемно заменить. Автор неизвестен В России вещи, о которых трубят из каждого утюга, все равно происходят неожиданно. Долго говорили, насколько невероятна война с Украиной, а это случилось. Говорили о том, что частные армии опасны для государственной бюрократии, — начался военный мятеж. Шутили «будем бомбить Воронеж» (не смотри, что в анекдоте) — и это тоже случилось. Говорили, что рано или поздно люди из системы начнут жрать друг друга, — и вот вчерашний герой Пригожин уже вымарывается из российской истории. 23 июня Евгений Пригожин начал вооруженный мятеж после того, как министр обороны Сергей Шойгу потребовал, чтобы наемники ЧВК «Вагнер» подписали контракты с Минобороны до 1 июля. То есть попытался лишить Пригожина его кормовой и силовой базы в виде частной армии. По версии Пригожина, Минобороны нанесло ракетный удар по лагерю «Вагнера», а также направило спецназ для захвата его самого. Однако, как выяснилось, о начавшемся отходе «вагнеров» из захваченного Бахмута и готовящемся мятеже уже 22 июня знала ФСБ из официального письма заместителя Пригожина в ЧВК Андрея Трошева. В США и вовсе заявили, что наблюдали за подготовкой мятежа две недели. О том же сообщила немецкая разведка. И, наконец, провалившееся задержание Пригожина должно было состояться не в лагере наемников, а в Санкт-Петербурге. Военный мятеж предварялся обращением Пригожина в телеграм, в котором он открыл общественности секрет Полишинеля. В частности, обвинил руководство Минобороны в развале армии, рассказал, что захват Украины нужен был для распила российскими олигархами бизнеса на новых территориях, как это было на Донбассе, заявил, что пора покончить с обманом и коррупцией в стране, и потребовал выдать ему министра обороны Шойгу и главу генштаба Герасимова. Шойгу спешно свалил из Ростова. 
Сам город и военные объекты Ростовской области были заняты «Вагнером». Нужно ли говорить, что все полицейские разбежались, решив, что на этом их полномочия — всё. Такой серьезный митинг разогнать шокерами и дубинками решительно нельзя. В Кремле едва успевали подносить и опорожнять чемоданчики. Ведь Путин не испытывал подобных стрессов со времен Болотной площади, когда реально испугался потери власти, после чего стал превращать правоохранительную систему в политическую полицию, создал Росгвардию и «заболел цифровизацией» как инструментом тотальной слежки за гражданами. Гражданское общество с белыми ленточками подавили, но беда пришла со стороны людей с шевронами «Наш бизнес — смерть, и бизнес идет хорошо». Страшно, очень страшно. Путин записал обращение, в котором назвал наемников предателями, обещал неминуемое наказание (которое таки минуло) и вспомнил 1917 год. Услышав про 1917 год, все, кроме «болота», в течение суток ждали досрочного прекращения полномочий президента. Правящая элита, включая Путина, покинула Москву. Косплеить украинское руководство и записывать ролики на Красной площади не стали. В Москве остался только Володин. Когда все утихло, он решил повысить свой аппаратный вес и призвал наказать бежавших. То есть почти всю верхушку страны. А в ней, между прочим, олигархи путинской волны, друзья детства, кооператив «Озеро» и всё, что навевает теплые воспоминания из прошлого. Отвечая на обращение Путина, Пригожин неосторожно заявил, что президент ошибается, и мятеж — это не мятеж, а «марш справедливости». При этом глава ЧВК требовал, чтобы никто не сопротивлялся колоннам наемников, движущимся на Москву, а любой, кто встанет на пути, будет уничтожен. Потому что никто не встанет на пути у справедливости. После некоторой фрустрации ФСБ очухалась и забегала по военкоматам, собирая информацию о женах и родственниках «вагнеров». Под Москвой начали разрывать экскаваторами дороги и выставлять грузовики с песком. 
Кадыров заверил Путина в своей преданности и отправил в направлении Ростова батальон «Ахмат», который в очередной раз весьма благоразумно не доехал до точки соприкосновения. Вечером 24 июня, когда колонна «Вагнера» была в 200 км от Москвы, Пригожин решил развернуть колонну и вернуться в полевые лагеря во избежание кровопролития (умолчав о куче перебитой российской авиации с РЭБ и ее экипажах). Ответственность за срыв мятежа взял на себя Лукашенко и сымитировал переговоры с Пригожиным, передав тому предложения Путина, который не осмелился лично ответить на звонок мятежника. Лукашенко с радостью вписался во что-то более легитимирующее его шаткую власть, чем осмотр «обосранных» коров в колхозах. Позже Песков сообщил, что Пригожин уезжает в Беларусь, а те «вагнера», которые на участвовали в мятеже, могут заключить контракты с Минобороны. В Беларуси был раскинут лагерь на 8 тысяч человек. У Путина от избытка адреналина развязался язык. Он провел открытое совещание Совбеза, записывал обращения, рассказывал о попытке начать гражданскую войну, клеймил предателей, благодарил всех, кто не разбежался. И, наконец, сдал все пароли и явки, заявив, что за год государство потратило на «Вагнер» и Пригожина 276 млрд рублей. Позже пропагандист Дмитрий Киселев назвал цифру в 858 млрд, которые Пригожин получил через холдинг «Конкорд». Все бы ничего, ведь активная часть гражданского общества обо всем и так знала. И о Сирии, и об Африке, и об Украине. Но Путин забылся и разоткровенничался перед своим ядерным электоратом, тем самым «болотом», которое смотрит телик, мало осведомлено о ЧВК, верит в сильного президента и патриотическую сплоченность. А теперь им рассказали, что государство финансирует через левые схемы частные военизированные формирования, которые ставят страну на грань гражданской войны. Президент теперь не находится над схваткой, а является ее частью, и спасает его Лукашенко, который всеми силами демонстрирует, что его яйца крепче, чем картофель и покрышка БелАЗа. 
Главу Росгвардии Золотова наградили за защиту Москвы, которая не состоялась. А самой Росгвардии обещали выдать танки и прочую тяжелую технику, которая теперь не отправится на фронт. Если будет выдана. Видимо, ожидают повторного марша государственных и полугосударственных военных на Москву. Так феодализм оформился и в военной сфере: армия против Украины, другая армия против этой армии, региональные армии на случай войны с федералами и частные армии на случай войны с конкурирующими корпорациями за активы. Не удивительно, что Пригожина возмутило, что его хотят лишить своей армии, когда у всех уважаемых людей она есть. Уголовное дело против Пригожина было юридически неграмотно прекращено, несмотря на убитых «вагнерами» летчиков, которых Путин почтил минутой молчания, выступая на крыльце Грановитой палаты Кремля перед сотрудниками ФСО и военным руководством. В частности, 28 июня сообщили, что арестован генерал Суровикин, лоббист «Вагнера» в Министерстве обороны, несмотря на то что осудил мятеж после его начала, записав соответствующее видеообращение при неустановленных обстоятельствах. Правозащитник Ольга Романова рассказала, что в СИЗО «Лефортово» была принята и передана задержанному открытка, отправленная на имя Суровикина С. В. Предположительно, сейчас Суровикин находится под другой мерой пресечения — запретом на совершение определенных действий. Неизвестна судьба генерала Мизинцева, который до увольнения из Минобороны обеспечивал серые поставки «вагнерам» боеприпасов во время войны с Украиной, за что был уволен и немедленно трудоустроен заместителем в ЧВК «Вагнер». В течение недели после мятежа начались чистки в Минобороны. Бизнес-империю Пригожина начали рушить, включая его силовые, медийные и чисто коммерческие ресурсы. Его репутацию тоже уничтожают. Пропагандисты на федеральных каналах развернулись на 180 градусов, клеймят предателя и рассказывают от том, насколько преувеличена роль «Вагнера» на фронте. 
И, конечно же, показывают «глубинному народу» материалы обысков во дворце Пригожина с найденными в нем наградным оружием, париками для маскировки и, по неподтвержденным данным, костюмом Папы Римского. На протяжении 2023 года в военной и чекистской бюрократии устоялась концепция того, что зарвавшегося Пригожина (выскочку, человека не из системы, с чрезмерными политическими амбициями) готовят на заклание. Слишком быстрый рост популярности при отсутствии аппаратного веса. Или, если короче, «кто он вообще такой, чтобы так борзеть?». Минобороны ограничивало снабжение ЧВК боеприпасами, минировало пути отхода «Вагнера» из Бахмута и принуждало наемников заключить контракты с Минобороны. То есть пыталось лишить Пригожина его собственной пирамиды, на вершине которой он таки имел аппаратный вес. Но этот аппарат слишком обособился от военной бюрократии. Нарушил пресловутую монополию государства на легальное насилие. Опасно. Обнулять «Вагнер» Шойгу начал еще во время сирийской кампании, где Россия помогала Башару Асаду сохранить свою диктаторскую власть. Во времена этой же кампании случилось уничтожение 200 «вагнеров», шедших на захват нефтеперерабатывающего завода. На запрос США: «Это ваши?» — Минобороны ответило: «Не, не наши». Американцы пожали плечами и нанесли по колонне авиаудар, полностью очистивший ландшафт от всей имеющейся на нем фауны. Раз Два Три Понимая, куда все движется, длительное время Пригожин как когда-то генералиссимус Валленштейн (тоже владевший частной армией) находился в полевых лагерях, откуда критиковал государственную армию, заверяя императора в том, что будет воевать в его интересах, но по своему усмотрению. Как и для Валленштейна, для Пригожина частная армия являлась единственным гарантом выживания в борьбе с тяжеловесами из государственной бюрократии — Шойгу и Герасимовым. 
Те не забыли оскорблений Пригожина и долго низводили численный состав «Вагнера» к минимуму, перекрыв доступ к вербовке зеков, держа наемников на передней линии фронта для перемалывания их руками ВСУ и, наконец, требуя перейти на контракты с Минобороны. А что насчет сообщников, единомышленников или по крайней мере сочувствующих Пригожину в государственной бюрократии? Можно говорить о ситуативном содействии отдельных чиновников Пригожину, но не о спланированном мятеже с целью смены высших должностных лиц, включая президента. Поскольку государство авторитарное, кажется, что у него единый центр принятия решений. Эта иллюзия заставляет думать, что все происходящее — это часть некоего плана. Право народа на восстание. Можно ли защищать демократию силой? Как Пригожин вербовал заключенных на войну. Репортаж из колонии о приезде основателя ЧВК «Вагнер» «Вы — пушечное мясо». Почему российские власти творят всякий треш? «Они хотят вырваться из русской тюрьмы». Ольга Романова о заключенных на фронте и новых законах после мятежа Пригожина «Я не могу желать поражения русской армии». Почему националисты и нацболы не выступают против войны в Украине? Цитата любопытно смещает эмбед А текст после цитаты пишется здесьМногообещающее начало
Тухлый финал
Предыстория конфликта Пригожина и Минобороны
По воспоминаниям корреспондента пригожинской пропагандистской помойки РИА «ФАН» Кирилла Романовского, весной 2016 года, после взятия наемниками Пальмиры, Шойгу заявил, что какие-то гопники не могут получать государственные награды РФ. И раздал награды своим гопникам из Минобороны.Список
Список
Список
Сообщники Пригожина в элитах
Тут случился треш)))
Читайте также
Многообещающее начало
Тухлый финал
Предыстория конфликта Пригожина и Минобороны
По воспоминаниям корреспондента пригожинской пропагандистской помойки РИА «ФАН» Кирилла Романовского, весной 2016 года, после взятия наемниками Пальмиры, Шойгу заявил, что какие-то гопники не могут получать государственные награды РФ. И раздал награды своим гопникам из Минобороны.Список
Список
Список
Сообщники Пригожина в элитах
Тут случился треш)))
Читайте также
\s*
",
r"
\s*
self.span_highlight = True
- elif (
- self.current_class == "lead"
- and not self.inheader
- and not self.span_highlight
- ):
+ elif self.current_class == "lead" and not self.inheader and not self.span_highlight:
# self.o("==") # NOTE: CriticMarkup {==
self.span_lead = True
else:
@@ -479,11 +469,7 @@ class HTML2Text(html.parser.HTMLParser):
and not self.span_lead
and not self.span_highlight
):
- if (
- start
- and self.preceding_data
- and self.preceding_data[-1] == self.strong_mark[0]
- ):
+ if start and self.preceding_data and self.preceding_data[-1] == self.strong_mark[0]:
strong = " " + self.strong_mark
self.preceding_data += " "
else:
@@ -548,13 +534,8 @@ class HTML2Text(html.parser.HTMLParser):
"href" in attrs
and not attrs["href"].startswith("#_ftn")
and attrs["href"] is not None
- and not (
- self.skip_internal_links and attrs["href"].startswith("#")
- )
- and not (
- self.ignore_mailto_links
- and attrs["href"].startswith("mailto:")
- )
+ and not (self.skip_internal_links and attrs["href"].startswith("#"))
+ and not (self.ignore_mailto_links and attrs["href"].startswith("mailto:"))
):
self.astack.append(attrs)
self.maybe_automatic_link = attrs["href"]
@@ -638,9 +619,7 @@ class HTML2Text(html.parser.HTMLParser):
self.o("![" + escape_md(alt) + "]")
if self.inline_links:
href = attrs.get("href") or ""
- self.o(
- "(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")"
- )
+ self.o("(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")")
else:
i = self.previousIndex(attrs)
if i is not None:
@@ -696,9 +675,7 @@ class HTML2Text(html.parser.HTMLParser):
# WARNING: does not line up - s > 9 correctly.
parent_list = None
for list in self.list:
- self.o(
- " " if parent_list == "ol" and list.name == "ul" else " "
- )
+ self.o(" " if parent_list == "ol" and list.name == "ul" else " ")
parent_list = list.name
if li.name == "ul":
@@ -787,9 +764,7 @@ class HTML2Text(html.parser.HTMLParser):
self.pbr()
self.br_toggle = " "
- def o(
- self, data: str, puredata: bool = False, force: Union[bool, str] = False
- ) -> None:
+ def o(self, data: str, puredata: bool = False, force: Union[bool, str] = False) -> None:
"""
Deal with indentation and whitespace
"""
@@ -864,9 +839,7 @@ class HTML2Text(html.parser.HTMLParser):
self.out(" ")
self.space = False
- if self.a and (
- (self.p_p == 2 and self.links_each_paragraph) or force == "end"
- ):
+ if self.a and ((self.p_p == 2 and self.links_each_paragraph) or force == "end"):
if force == "end":
self.out("\n")
@@ -925,11 +898,7 @@ class HTML2Text(html.parser.HTMLParser):
if self.maybe_automatic_link is not None:
href = self.maybe_automatic_link
- if (
- href == data
- and self.absolute_url_matcher.match(href)
- and self.use_automatic_links
- ):
+ if href == data and self.absolute_url_matcher.match(href) and self.use_automatic_links:
self.o("<" + data + ">")
self.empty_link = False
return
@@ -1000,9 +969,7 @@ class HTML2Text(html.parser.HTMLParser):
self.inline_links = False
for para in text.split("\n"):
if len(para) > 0:
- if not skipwrap(
- para, self.wrap_links, self.wrap_list_items, self.wrap_tables
- ):
+ if not skipwrap(para, self.wrap_links, self.wrap_list_items, self.wrap_tables):
indent = ""
if para.startswith(" " + self.ul_item_mark):
# list item continuation: add a double indent to the
@@ -1043,9 +1010,7 @@ class HTML2Text(html.parser.HTMLParser):
return result
-def html2text(
- html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH
-) -> str:
+def html2text(html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH) -> str:
h = html.strip() or ""
if h:
h = HTML2Text(baseurl=baseurl, bodywidth=bodywidth)
diff --git a/migration/html2text/cli.py b/migration/html2text/cli.py
index dbaba28b..62e0738f 100644
--- a/migration/html2text/cli.py
+++ b/migration/html2text/cli.py
@@ -117,10 +117,7 @@ def main() -> None:
dest="images_with_size",
action="store_true",
default=config.IMAGES_WITH_SIZE,
- help=(
- "Write image tags with height and width attrs as raw html to retain "
- "dimensions"
- ),
+ help=("Write image tags with height and width attrs as raw html to retain " "dimensions"),
)
p.add_argument(
"-g",
@@ -260,9 +257,7 @@ def main() -> None:
default=config.CLOSE_QUOTE,
help="The character used to close quotes",
)
- p.add_argument(
- "--version", action="version", version=".".join(map(str, __version__))
- )
+ p.add_argument("--version", action="version", version=".".join(map(str, __version__)))
p.add_argument("filename", nargs="?")
p.add_argument("encoding", nargs="?", default="utf-8")
args = p.parse_args()
diff --git a/migration/html2text/utils.py b/migration/html2text/utils.py
index 1cf22b52..fd6a16c2 100644
--- a/migration/html2text/utils.py
+++ b/migration/html2text/utils.py
@@ -4,9 +4,7 @@ from typing import Dict, List, Optional
from . import config
unifiable_n = {
- html.entities.name2codepoint[k]: v
- for k, v in config.UNIFIABLE.items()
- if k != "nbsp"
+ html.entities.name2codepoint[k]: v for k, v in config.UNIFIABLE.items() if k != "nbsp"
}
@@ -156,9 +154,7 @@ def list_numbering_start(attrs: Dict[str, Optional[str]]) -> int:
return 0
-def skipwrap(
- para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool
-) -> bool:
+def skipwrap(para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool) -> bool:
# If it appears to contain a link
# don't wrap
if not wrap_links and config.RE_LINK.search(para):
@@ -236,9 +232,7 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]:
max_width += [len(x) + right_margin for x in cols[-(num_cols - max_cols) :]]
max_cols = num_cols
- max_width = [
- max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)
- ]
+ max_width = [max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)]
# reformat
new_lines = []
@@ -247,15 +241,13 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]:
if set(line.strip()) == set("-|"):
filler = "-"
new_cols = [
- x.rstrip() + (filler * (M - len(x.rstrip())))
- for x, M in zip(cols, max_width)
+ x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
]
new_lines.append("|-" + "|".join(new_cols) + "|")
else:
filler = " "
new_cols = [
- x.rstrip() + (filler * (M - len(x.rstrip())))
- for x, M in zip(cols, max_width)
+ x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
]
new_lines.append("| " + "|".join(new_cols) + "|")
return new_lines
diff --git a/migration/tables/comments.py b/migration/tables/comments.py
index 82e32924..092850c8 100644
--- a/migration/tables/comments.py
+++ b/migration/tables/comments.py
@@ -5,61 +5,48 @@ from dateutil.parser import parse as date_parse
from base.orm import local_session
from migration.html2text import html2text
from orm.reaction import Reaction, ReactionKind
-from orm.shout import ShoutReactionsFollower
+from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import TopicFollower
from orm.user import User
-from orm.shout import Shout
ts = datetime.now(tz=timezone.utc)
def auto_followers(session, topics, reaction_dict):
# creating shout's reactions following for reaction author
- following1 = session.query(
- ShoutReactionsFollower
- ).where(
- ShoutReactionsFollower.follower == reaction_dict["createdBy"]
- ).filter(
- ShoutReactionsFollower.shout == reaction_dict["shout"]
- ).first()
+ following1 = (
+ session.query(ShoutReactionsFollower)
+ .where(ShoutReactionsFollower.follower == reaction_dict["createdBy"])
+ .filter(ShoutReactionsFollower.shout == reaction_dict["shout"])
+ .first()
+ )
if not following1:
following1 = ShoutReactionsFollower.create(
- follower=reaction_dict["createdBy"],
- shout=reaction_dict["shout"],
- auto=True
+ follower=reaction_dict["createdBy"], shout=reaction_dict["shout"], auto=True
)
session.add(following1)
# creating topics followings for reaction author
for t in topics:
- tf = session.query(
- TopicFollower
- ).where(
- TopicFollower.follower == reaction_dict["createdBy"]
- ).filter(
- TopicFollower.topic == t['id']
- ).first()
+ tf = (
+ session.query(TopicFollower)
+ .where(TopicFollower.follower == reaction_dict["createdBy"])
+ .filter(TopicFollower.topic == t['id'])
+ .first()
+ )
if not tf:
topic_following = TopicFollower.create(
- follower=reaction_dict["createdBy"],
- topic=t['id'],
- auto=True
+ follower=reaction_dict["createdBy"], topic=t['id'], auto=True
)
session.add(topic_following)
def migrate_ratings(session, entry, reaction_dict):
for comment_rating_old in entry.get("ratings", []):
- rater = (
- session.query(User)
- .filter(User.oid == comment_rating_old["createdBy"])
- .first()
- )
+ rater = session.query(User).filter(User.oid == comment_rating_old["createdBy"]).first()
re_reaction_dict = {
"shout": reaction_dict["shout"],
"replyTo": reaction_dict["id"],
- "kind": ReactionKind.LIKE
- if comment_rating_old["value"] > 0
- else ReactionKind.DISLIKE,
+ "kind": ReactionKind.LIKE if comment_rating_old["value"] > 0 else ReactionKind.DISLIKE,
"createdBy": rater.id if rater else 1,
}
cts = comment_rating_old.get("createdAt")
@@ -68,18 +55,15 @@ def migrate_ratings(session, entry, reaction_dict):
try:
# creating reaction from old rating
rr = Reaction.create(**re_reaction_dict)
- following2 = session.query(
- ShoutReactionsFollower
- ).where(
- ShoutReactionsFollower.follower == re_reaction_dict['createdBy']
- ).filter(
- ShoutReactionsFollower.shout == rr.shout
- ).first()
+ following2 = (
+ session.query(ShoutReactionsFollower)
+ .where(ShoutReactionsFollower.follower == re_reaction_dict['createdBy'])
+ .filter(ShoutReactionsFollower.shout == rr.shout)
+ .first()
+ )
if not following2:
following2 = ShoutReactionsFollower.create(
- follower=re_reaction_dict['createdBy'],
- shout=rr.shout,
- auto=True
+ follower=re_reaction_dict['createdBy'], shout=rr.shout, auto=True
)
session.add(following2)
session.add(rr)
@@ -150,9 +134,7 @@ async def migrate(entry, storage):
else:
stage = "author and old id found"
try:
- shout = session.query(
- Shout
- ).where(Shout.slug == old_shout["slug"]).one()
+ shout = session.query(Shout).where(Shout.slug == old_shout["slug"]).one()
if shout:
reaction_dict["shout"] = shout.id
reaction_dict["createdBy"] = author.id if author else 1
@@ -190,17 +172,20 @@ def migrate_2stage(old_comment, idmap):
comment = session.query(Reaction).where(Reaction.id == new_id).first()
try:
if new_replyto_id:
- new_reply = session.query(Reaction).where(Reaction.id == new_replyto_id).first()
+ new_reply = (
+ session.query(Reaction).where(Reaction.id == new_replyto_id).first()
+ )
if not new_reply:
print(new_replyto_id)
raise Exception("cannot find reply by id!")
comment.replyTo = new_reply.id
session.add(comment)
- srf = session.query(ShoutReactionsFollower).where(
- ShoutReactionsFollower.shout == comment.shout
- ).filter(
- ShoutReactionsFollower.follower == comment.createdBy
- ).first()
+ srf = (
+ session.query(ShoutReactionsFollower)
+ .where(ShoutReactionsFollower.shout == comment.shout)
+ .filter(ShoutReactionsFollower.follower == comment.createdBy)
+ .first()
+ )
if not srf:
srf = ShoutReactionsFollower.create(
shout=comment.shout, follower=comment.createdBy, auto=True
diff --git a/migration/tables/content_items.py b/migration/tables/content_items.py
index a2297d98..92a97c24 100644
--- a/migration/tables/content_items.py
+++ b/migration/tables/content_items.py
@@ -1,16 +1,18 @@
-from datetime import datetime, timezone
import json
+import re
+from datetime import datetime, timezone
+
from dateutil.parser import parse as date_parse
from sqlalchemy.exc import IntegrityError
from transliterate import translit
+
from base.orm import local_session
from migration.extract import extract_html, extract_media
from orm.reaction import Reaction, ReactionKind
-from orm.shout import Shout, ShoutTopic, ShoutReactionsFollower
+from orm.shout import Shout, ShoutReactionsFollower, ShoutTopic
+from orm.topic import Topic, TopicFollower
from orm.user import User
-from orm.topic import TopicFollower, Topic
from services.stat.viewed import ViewedStorage
-import re
OLD_DATE = "2016-03-05 22:22:00.350000"
ts = datetime.now(tz=timezone.utc)
@@ -91,11 +93,12 @@ async def create_shout(shout_dict):
s = Shout.create(**shout_dict)
author = s.authors[0]
with local_session() as session:
- srf = session.query(ShoutReactionsFollower).where(
- ShoutReactionsFollower.shout == s.id
- ).filter(
- ShoutReactionsFollower.follower == author.id
- ).first()
+ srf = (
+ session.query(ShoutReactionsFollower)
+ .where(ShoutReactionsFollower.shout == s.id)
+ .filter(ShoutReactionsFollower.follower == author.id)
+ .first()
+ )
if not srf:
srf = ShoutReactionsFollower.create(shout=s.id, follower=author.id, auto=True)
session.add(srf)
@@ -137,11 +140,14 @@ async def migrate(entry, storage):
r = {
"layout": type2layout[entry["type"]],
"title": entry["title"],
- "authors": [author, ],
+ "authors": [
+ author,
+ ],
"slug": get_shout_slug(entry),
"cover": (
- "https://images.discours.io/unsafe/" +
- entry["thumborId"] if entry.get("thumborId") else entry.get("image", {}).get("url")
+ "https://images.discours.io/unsafe/" + entry["thumborId"]
+ if entry.get("thumborId")
+ else entry.get("image", {}).get("url")
),
"visibility": "public" if entry.get("published") else "community",
"publishedAt": date_parse(entry.get("publishedAt")) if entry.get("published") else None,
@@ -150,7 +156,7 @@ async def migrate(entry, storage):
"updatedAt": date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts,
"createdBy": author.id,
"topics": await add_topics_follower(entry, storage, author),
- "body": extract_html(entry, cleanup=True)
+ "body": extract_html(entry, cleanup=True),
}
# main topic patch
@@ -184,7 +190,9 @@ async def migrate(entry, storage):
# udpate data
shout_dict = shout.dict()
- shout_dict["authors"] = [author.dict(), ]
+ shout_dict["authors"] = [
+ author.dict(),
+ ]
# shout topics aftermath
shout_dict["topics"] = await topics_aftermath(r, storage)
@@ -193,7 +201,9 @@ async def migrate(entry, storage):
await content_ratings_to_reactions(entry, shout_dict["slug"])
# shout views
- await ViewedStorage.increment(shout_dict["slug"], amount=entry.get("views", 1), viewer='old-discours')
+ await ViewedStorage.increment(
+ shout_dict["slug"], amount=entry.get("views", 1), viewer='old-discours'
+ )
# del shout_dict['ratings']
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
@@ -205,7 +215,9 @@ async def add_topics_follower(entry, storage, user):
topics = set([])
category = entry.get("category")
topics_by_oid = storage["topics"]["by_oid"]
- oids = [category, ] + entry.get("tags", [])
+ oids = [
+ category,
+ ] + entry.get("tags", [])
for toid in oids:
tslug = topics_by_oid.get(toid, {}).get("slug")
if tslug:
@@ -217,19 +229,14 @@ async def add_topics_follower(entry, storage, user):
try:
tpc = session.query(Topic).where(Topic.slug == tpcslug).first()
if tpc:
- tf = session.query(
- TopicFollower
- ).where(
- TopicFollower.follower == user.id
- ).filter(
- TopicFollower.topic == tpc.id
- ).first()
+ tf = (
+ session.query(TopicFollower)
+ .where(TopicFollower.follower == user.id)
+ .filter(TopicFollower.topic == tpc.id)
+ .first()
+ )
if not tf:
- tf = TopicFollower.create(
- topic=tpc.id,
- follower=user.id,
- auto=True
- )
+ tf = TopicFollower.create(topic=tpc.id, follower=user.id, auto=True)
session.add(tf)
session.commit()
except IntegrityError:
@@ -295,10 +302,7 @@ async def resolve_create_shout(shout_dict):
for key in shout_dict:
if key in s.__dict__:
if s.__dict__[key] != shout_dict[key]:
- print(
- "[migration] shout already exists, but differs in %s"
- % key
- )
+ print("[migration] shout already exists, but differs in %s" % key)
bump = True
else:
print("[migration] shout already exists, but lacks %s" % key)
@@ -344,9 +348,7 @@ async def topics_aftermath(entry, storage):
)
if not shout_topic_new:
try:
- ShoutTopic.create(
- **{"shout": shout.id, "topic": new_topic.id}
- )
+ ShoutTopic.create(**{"shout": shout.id, "topic": new_topic.id})
except Exception:
print("[migration] shout topic error: " + newslug)
session.commit()
@@ -363,9 +365,7 @@ async def content_ratings_to_reactions(entry, slug):
with local_session() as session:
for content_rating in entry.get("ratings", []):
rater = (
- session.query(User)
- .filter(User.oid == content_rating["createdBy"])
- .first()
+ session.query(User).filter(User.oid == content_rating["createdBy"]).first()
) or User.default_user
shout = session.query(Shout).where(Shout.slug == slug).first()
cts = content_rating.get("createdAt")
@@ -375,7 +375,7 @@ async def content_ratings_to_reactions(entry, slug):
if content_rating["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": rater.id,
- "shout": shout.id
+ "shout": shout.id,
}
reaction = (
session.query(Reaction)
diff --git a/migration/tables/remarks.py b/migration/tables/remarks.py
index 026b95c6..09957ed4 100644
--- a/migration/tables/remarks.py
+++ b/migration/tables/remarks.py
@@ -12,27 +12,19 @@ def migrate(entry, storage):
print(shout_dict['body'])
remark = {
"shout": shout_dict['id'],
- "body": extract_md(
- html2text(entry['body']),
- shout_dict
- ),
- "kind": ReactionKind.REMARK
+ "body": extract_md(html2text(entry['body']), shout_dict),
+ "kind": ReactionKind.REMARK,
}
if entry.get('textBefore'):
- remark['range'] = str(
- shout_dict['body']
- .index(
- entry['textBefore'] or ''
- )
- ) + ':' + str(
- shout_dict['body']
- .index(
- entry['textAfter'] or ''
- ) + len(
- entry['textAfter'] or ''
- )
+ remark['range'] = (
+ str(shout_dict['body'].index(entry['textBefore'] or ''))
+ + ':'
+ + str(
+ shout_dict['body'].index(entry['textAfter'] or '')
+ + len(entry['textAfter'] or '')
)
+ )
with local_session() as session:
rmrk = Reaction.create(**remark)
diff --git a/migration/tables/topics.py b/migration/tables/topics.py
index 17804376..ae9ddbda 100644
--- a/migration/tables/topics.py
+++ b/migration/tables/topics.py
@@ -10,7 +10,7 @@ def migrate(entry):
"slug": entry["slug"],
"oid": entry["_id"],
"title": entry["title"].replace(" ", " "),
- "body": extract_md(html2text(body_orig))
+ "body": extract_md(html2text(body_orig)),
}
with local_session() as session:
diff --git a/migration/tables/users.py b/migration/tables/users.py
index 3ccf9029..46f2e825 100644
--- a/migration/tables/users.py
+++ b/migration/tables/users.py
@@ -23,7 +23,7 @@ def migrate(entry):
"muted": False, # amnesty
"links": [],
"name": "anonymous",
- "password": entry["services"]["password"].get("bcrypt")
+ "password": entry["services"]["password"].get("bcrypt"),
}
if "updatedAt" in entry:
@@ -35,7 +35,11 @@ def migrate(entry):
slug = entry["profile"].get("path").lower()
slug = re.sub('[^0-9a-zA-Z]+', '-', slug).strip()
user_dict["slug"] = slug
- bio = (entry.get("profile", {"bio": ""}).get("bio") or "").replace('\(', '(').replace('\)', ')')
+ bio = (
+ (entry.get("profile", {"bio": ""}).get("bio") or "")
+ .replace('\(', '(')
+ .replace('\)', ')')
+ )
bio_text = BeautifulSoup(bio, features="lxml").text
if len(bio_text) > 120:
@@ -46,8 +50,7 @@ def migrate(entry):
# userpic
try:
user_dict["userpic"] = (
- "https://images.discours.io/unsafe/"
- + entry["profile"]["thumborId"]
+ "https://images.discours.io/unsafe/" + entry["profile"]["thumborId"]
)
except KeyError:
try:
@@ -62,11 +65,7 @@ def migrate(entry):
name = (name + " " + ln) if ln else name
if not name:
name = slug if slug else "anonymous"
- name = (
- entry["profile"]["path"].lower().strip().replace(" ", "-")
- if len(name) < 2
- else name
- )
+ name = entry["profile"]["path"].lower().strip().replace(" ", "-") if len(name) < 2 else name
user_dict["name"] = name
# links
@@ -95,9 +94,7 @@ def migrate(entry):
except IntegrityError:
print("[migration] cannot create user " + user_dict["slug"])
with local_session() as session:
- old_user = (
- session.query(User).filter(User.slug == user_dict["slug"]).first()
- )
+ old_user = session.query(User).filter(User.slug == user_dict["slug"]).first()
old_user.oid = oid
old_user.password = user_dict["password"]
session.commit()
@@ -114,7 +111,7 @@ def post_migrate():
"slug": "old-discours",
"username": "old-discours",
"email": "old@discours.io",
- "name": "Просмотры на старой версии сайта"
+ "name": "Просмотры на старой версии сайта",
}
with local_session() as session:
@@ -148,11 +145,7 @@ def migrate_2stage(entry, id_map):
user_rating = UserRating.create(**user_rating_dict)
if user_rating_dict['value'] > 0:
- af = AuthorFollower.create(
- author=user.id,
- follower=rater.id,
- auto=True
- )
+ af = AuthorFollower.create(author=user.id, follower=rater.id, auto=True)
session.add(af)
session.add(user_rating)
session.commit()
diff --git a/orm/__init__.py b/orm/__init__.py
index 53b13951..9f66f85c 100644
--- a/orm/__init__.py
+++ b/orm/__init__.py
@@ -1,7 +1,7 @@
from base.orm import Base, engine
from orm.community import Community
from orm.notification import Notification
-from orm.rbac import Operation, Resource, Permission, Role
+from orm.rbac import Operation, Permission, Resource, Role
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic, TopicFollower
@@ -32,5 +32,5 @@ __all__ = [
"Notification",
"Reaction",
"UserRating",
- "init_tables"
+ "init_tables",
]
diff --git a/orm/community.py b/orm/community.py
index b55b857f..7045e1aa 100644
--- a/orm/community.py
+++ b/orm/community.py
@@ -1,6 +1,7 @@
from datetime import datetime
-from sqlalchemy import Column, String, ForeignKey, DateTime
+from sqlalchemy import Column, DateTime, ForeignKey, String
+
from base.orm import Base, local_session
@@ -10,9 +11,7 @@ class CommunityFollower(Base):
id = None # type: ignore
follower = Column(ForeignKey("user.id"), primary_key=True)
community = Column(ForeignKey("community.id"), primary_key=True)
- joinedAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ joinedAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
# role = Column(ForeignKey(Role.id), nullable=False, comment="Role for member")
@@ -23,16 +22,12 @@ class Community(Base):
slug = Column(String, nullable=False, unique=True, comment="Slug")
desc = Column(String, nullable=False, default="")
pic = Column(String, nullable=False, default="")
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
@staticmethod
def init_table():
with local_session() as session:
- d = (
- session.query(Community).filter(Community.slug == "discours").first()
- )
+ d = session.query(Community).filter(Community.slug == "discours").first()
if not d:
d = Community.create(name="Дискурс", slug="discours")
session.add(d)
diff --git a/orm/notification.py b/orm/notification.py
index 25f4e4f3..a838ce6b 100644
--- a/orm/notification.py
+++ b/orm/notification.py
@@ -1,9 +1,10 @@
from datetime import datetime
-from sqlalchemy import Column, Enum, ForeignKey, DateTime, Boolean, Integer
+from enum import Enum as Enumeration
+
+from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer
from sqlalchemy.dialects.postgresql import JSONB
from base.orm import Base
-from enum import Enum as Enumeration
class NotificationType(Enumeration):
diff --git a/orm/rbac.py b/orm/rbac.py
index 29ade72e..80914949 100644
--- a/orm/rbac.py
+++ b/orm/rbac.py
@@ -1,9 +1,9 @@
import warnings
-from sqlalchemy import String, Column, ForeignKey, UniqueConstraint, TypeDecorator
+from sqlalchemy import Column, ForeignKey, String, TypeDecorator, UniqueConstraint
from sqlalchemy.orm import relationship
-from base.orm import Base, REGISTRY, engine, local_session
+from base.orm import REGISTRY, Base, engine, local_session
# Role Based Access Control #
@@ -121,16 +121,23 @@ class Operation(Base):
class Resource(Base):
__tablename__ = "resource"
- resourceClass = Column(
- String, nullable=False, unique=True, comment="Resource class"
- )
+ resourceClass = Column(String, nullable=False, unique=True, comment="Resource class")
name = Column(String, nullable=False, unique=True, comment="Resource name")
# TODO: community = Column(ForeignKey())
@staticmethod
def init_table():
with local_session() as session:
- for res in ["shout", "topic", "reaction", "chat", "message", "invite", "community", "user"]:
+ for res in [
+ "shout",
+ "topic",
+ "reaction",
+ "chat",
+ "message",
+ "invite",
+ "community",
+ "user",
+ ]:
r = session.query(Resource).filter(Resource.name == res).first()
if not r:
r = Resource.create(name=res, resourceClass=res)
@@ -145,9 +152,7 @@ class Permission(Base):
{"extend_existing": True},
)
- role = Column(
- ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role"
- )
+ role = Column(ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role")
operation = Column(
ForeignKey("operation.id", ondelete="CASCADE"),
nullable=False,
diff --git a/orm/reaction.py b/orm/reaction.py
index 1c129e23..f3680b6d 100644
--- a/orm/reaction.py
+++ b/orm/reaction.py
@@ -27,18 +27,14 @@ class ReactionKind(Enumeration):
class Reaction(Base):
__tablename__ = "reaction"
body = Column(String, nullable=True, comment="Reaction Body")
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
createdBy = Column(ForeignKey("user.id"), nullable=False, index=True, comment="Sender")
updatedAt = Column(DateTime, nullable=True, comment="Updated at")
updatedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Last Editor")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
deletedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Deleted by")
shout = Column(ForeignKey("shout.id"), nullable=False, index=True)
- replyTo = Column(
- ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID"
- )
+ replyTo = Column(ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID")
range = Column(String, nullable=True, comment="Range in format
:")
kind = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
oid = Column(String, nullable=True, comment="Old ID")
diff --git a/orm/shout.py b/orm/shout.py
index 22381d4c..0d980b8a 100644
--- a/orm/shout.py
+++ b/orm/shout.py
@@ -1,6 +1,6 @@
from datetime import datetime
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, JSON
+from sqlalchemy import JSON, Boolean, Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import column_property, relationship
from base.orm import Base, local_session
@@ -24,9 +24,7 @@ class ShoutReactionsFollower(Base):
follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
auto = Column(Boolean, nullable=False, default=False)
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
deletedAt = Column(DateTime, nullable=True)
@@ -83,12 +81,7 @@ class Shout(Base):
with local_session() as session:
s = session.query(Shout).first()
if not s:
- entry = {
- "slug": "genesis-block",
- "body": "",
- "title": "Ничего",
- "lang": "ru"
- }
+ entry = {"slug": "genesis-block", "body": "", "title": "Ничего", "lang": "ru"}
s = Shout.create(**entry)
session.add(s)
session.commit()
diff --git a/orm/topic.py b/orm/topic.py
index a37dc69a..b0d7cc01 100644
--- a/orm/topic.py
+++ b/orm/topic.py
@@ -11,9 +11,7 @@ class TopicFollower(Base):
id = None # type: ignore
follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
auto = Column(Boolean, nullable=False, default=False)
@@ -24,7 +22,5 @@ class Topic(Base):
title = Column(String, nullable=False, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
- community = Column(
- ForeignKey("community.id"), default=1, comment="Community"
- )
+ community = Column(ForeignKey("community.id"), default=1, comment="Community")
oid = Column(String, nullable=True, comment="Old ID")
diff --git a/orm/user.py b/orm/user.py
index 5aeab90e..d10be411 100644
--- a/orm/user.py
+++ b/orm/user.py
@@ -3,6 +3,7 @@ from datetime import datetime
from sqlalchemy import JSON as JSONType
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
+
from base.orm import Base, local_session
from orm.rbac import Role
@@ -34,9 +35,7 @@ class AuthorFollower(Base):
id = None # type: ignore
follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
author = Column(ForeignKey("user.id"), primary_key=True, index=True)
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
auto = Column(Boolean, nullable=False, default=False)
@@ -54,12 +53,8 @@ class User(Base):
slug = Column(String, unique=True, comment="User's slug")
muted = Column(Boolean, default=False)
emailConfirmed = Column(Boolean, default=False)
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
- lastSeen = Column(
- DateTime, nullable=False, default=datetime.now, comment="Was online at"
- )
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ lastSeen = Column(DateTime, nullable=False, default=datetime.now, comment="Was online at")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
links = Column(JSONType, nullable=True, comment="Links")
oauth = Column(String, nullable=True)
diff --git a/requirements-dev.txt b/requirements-dev.txt
index d221f3b0..b2e99a01 100755
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -2,3 +2,4 @@ isort
brunette
flake8
mypy
+pre-commit
diff --git a/resetdb.sh b/resetdb.sh
index 39b3b9b2..40ba2e37 100755
--- a/resetdb.sh
+++ b/resetdb.sh
@@ -53,4 +53,3 @@ echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'
-
diff --git a/resolvers/__init__.py b/resolvers/__init__.py
index 5d753ac4..78ae7e22 100644
--- a/resolvers/__init__.py
+++ b/resolvers/__init__.py
@@ -1,67 +1,35 @@
from resolvers.auth import (
- login,
- sign_out,
- is_email_used,
- register_by_email,
- confirm_email,
auth_send_link,
+ confirm_email,
get_current_user,
+ is_email_used,
+ login,
+ register_by_email,
+ sign_out,
)
-
-from resolvers.create.migrate import markdown_body
from resolvers.create.editor import create_shout, delete_shout, update_shout
-
-from resolvers.zine.profile import (
- load_authors_by,
- rate_user,
- update_profile,
- get_authors_all
-)
-
+from resolvers.create.migrate import markdown_body
+from resolvers.inbox.chats import create_chat, delete_chat, update_chat
+from resolvers.inbox.load import load_chats, load_messages_by, load_recipients
+from resolvers.inbox.messages import create_message, delete_message, mark_as_read, update_message
+from resolvers.inbox.search import search_recipients
+from resolvers.notifications import load_notifications
+from resolvers.zine.following import follow, unfollow
+from resolvers.zine.load import load_shout, load_shouts_by
+from resolvers.zine.profile import get_authors_all, load_authors_by, rate_user, update_profile
from resolvers.zine.reactions import (
create_reaction,
delete_reaction,
- update_reaction,
- reactions_unfollow,
+ load_reactions_by,
reactions_follow,
- load_reactions_by
+ reactions_unfollow,
+ update_reaction,
)
from resolvers.zine.topics import (
+ get_topic,
topic_follow,
topic_unfollow,
+ topics_all,
topics_by_author,
topics_by_community,
- topics_all,
- get_topic
)
-
-from resolvers.zine.following import (
- follow,
- unfollow
-)
-
-from resolvers.zine.load import (
- load_shout,
- load_shouts_by
-)
-
-from resolvers.inbox.chats import (
- create_chat,
- delete_chat,
- update_chat
-
-)
-from resolvers.inbox.messages import (
- create_message,
- delete_message,
- update_message,
- mark_as_read
-)
-from resolvers.inbox.load import (
- load_chats,
- load_messages_by,
- load_recipients
-)
-from resolvers.inbox.search import search_recipients
-
-from resolvers.notifications import load_notifications
diff --git a/resolvers/auth.py b/resolvers/auth.py
index 17369b7a..c28898e3 100644
--- a/resolvers/auth.py
+++ b/resolvers/auth.py
@@ -1,24 +1,30 @@
# -*- coding: utf-8 -*-
+import re
from datetime import datetime, timezone
from urllib.parse import quote_plus
from graphql.type import GraphQLResolveInfo
from starlette.responses import RedirectResponse
from transliterate import translit
-import re
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
-from base.exceptions import (BaseHttpException, InvalidPassword, InvalidToken,
- ObjectNotExist, Unauthorized)
+from base.exceptions import (
+ BaseHttpException,
+ InvalidPassword,
+ InvalidToken,
+ ObjectNotExist,
+ Unauthorized,
+)
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Role, User
-from settings import SESSION_TOKEN_HEADER, FRONTEND_URL
+from settings import FRONTEND_URL, SESSION_TOKEN_HEADER
@mutation.field("getSession")
@@ -32,10 +38,7 @@ async def get_current_user(_, info):
user.lastSeen = datetime.now(tz=timezone.utc)
session.commit()
- return {
- "token": token,
- "user": user
- }
+ return {"token": token, "user": user}
@mutation.field("confirmEmail")
@@ -53,10 +56,7 @@ async def confirm_email(_, info, token):
user.lastSeen = datetime.now(tz=timezone.utc)
session.add(user)
session.commit()
- return {
- "token": session_token,
- "user": user
- }
+ return {"token": session_token, "user": user}
except InvalidToken as e:
raise InvalidToken(e.message)
except Exception as e:
@@ -122,7 +122,7 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str
"email": email,
"username": email, # will be used to store phone number or some messenger network id
"name": name,
- "slug": slug
+ "slug": slug,
}
if password:
user_dict["password"] = Password.encode(password)
@@ -172,10 +172,7 @@ async def login(_, info, email: str, password: str = "", lang: str = "ru"):
user = Identity.password(orm_user, password)
session_token = await TokenStorage.create_session(user)
print(f"[auth] user {email} authorized")
- return {
- "token": session_token,
- "user": user
- }
+ return {"token": session_token, "user": user}
except InvalidPassword:
print(f"[auth] {email}: invalid password")
raise InvalidPassword("invalid password") # contains webserver status
diff --git a/resolvers/create/editor.py b/resolvers/create/editor.py
index c81ff404..d6db8bf6 100644
--- a/resolvers/create/editor.py
+++ b/resolvers/create/editor.py
@@ -20,19 +20,21 @@ async def create_shout(_, info, inp):
with local_session() as session:
topics = session.query(Topic).filter(Topic.slug.in_(inp.get('topics', []))).all()
- new_shout = Shout.create(**{
- "title": inp.get("title"),
- "subtitle": inp.get('subtitle'),
- "lead": inp.get('lead'),
- "description": inp.get('description'),
- "body": inp.get("body", ''),
- "layout": inp.get("layout"),
- "authors": inp.get("authors", []),
- "slug": inp.get("slug"),
- "mainTopic": inp.get("mainTopic"),
- "visibility": "owner",
- "createdBy": auth.user_id
- })
+ new_shout = Shout.create(
+ **{
+ "title": inp.get("title"),
+ "subtitle": inp.get('subtitle'),
+ "lead": inp.get('lead'),
+ "description": inp.get('description'),
+ "body": inp.get("body", ''),
+ "layout": inp.get("layout"),
+ "authors": inp.get("authors", []),
+ "slug": inp.get("slug"),
+ "mainTopic": inp.get("mainTopic"),
+ "visibility": "owner",
+ "createdBy": auth.user_id,
+ }
+ )
for topic in topics:
t = ShoutTopic.create(topic=topic.id, shout=new_shout.id)
@@ -64,10 +66,15 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
- shout = session.query(Shout).options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- ).filter(Shout.id == shout_id).first()
+ shout = (
+ session.query(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ )
+ .filter(Shout.id == shout_id)
+ .first()
+ )
if not shout:
return {"error": "shout not found"}
@@ -94,25 +101,34 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
session.commit()
for new_topic_to_link in new_topics_to_link:
- created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=new_topic_to_link.id)
+ created_unlinked_topic = ShoutTopic.create(
+ shout=shout.id, topic=new_topic_to_link.id
+ )
session.add(created_unlinked_topic)
- existing_topics_input = [topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0]
- existing_topic_to_link_ids = [existing_topic_input["id"] for existing_topic_input in existing_topics_input
- if existing_topic_input["id"] not in [topic.id for topic in shout.topics]]
+ existing_topics_input = [
+ topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0
+ ]
+ existing_topic_to_link_ids = [
+ existing_topic_input["id"]
+ for existing_topic_input in existing_topics_input
+ if existing_topic_input["id"] not in [topic.id for topic in shout.topics]
+ ]
for existing_topic_to_link_id in existing_topic_to_link_ids:
- created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=existing_topic_to_link_id)
+ created_unlinked_topic = ShoutTopic.create(
+ shout=shout.id, topic=existing_topic_to_link_id
+ )
session.add(created_unlinked_topic)
- topic_to_unlink_ids = [topic.id for topic in shout.topics
- if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]]
+ topic_to_unlink_ids = [
+ topic.id
+ for topic in shout.topics
+ if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]
+ ]
shout_topics_to_remove = session.query(ShoutTopic).filter(
- and_(
- ShoutTopic.shout == shout.id,
- ShoutTopic.topic.in_(topic_to_unlink_ids)
- )
+ and_(ShoutTopic.shout == shout.id, ShoutTopic.topic.in_(topic_to_unlink_ids))
)
for shout_topic_to_remove in shout_topics_to_remove:
diff --git a/resolvers/create/migrate.py b/resolvers/create/migrate.py
index f16341f0..9e849f86 100644
--- a/resolvers/create/migrate.py
+++ b/resolvers/create/migrate.py
@@ -1,7 +1,6 @@
-
from base.resolvers import query
-from resolvers.auth import login_required
from migration.extract import extract_md
+from resolvers.auth import login_required
@login_required
diff --git a/resolvers/inbox/chats.py b/resolvers/inbox/chats.py
index 853defab..a589e870 100644
--- a/resolvers/inbox/chats.py
+++ b/resolvers/inbox/chats.py
@@ -24,27 +24,24 @@ async def update_chat(_, info, chat_new: Chat):
chat_id = chat_new["id"]
chat = await redis.execute("GET", f"chats/{chat_id}")
if not chat:
- return {
- "error": "chat not exist"
- }
+ return {"error": "chat not exist"}
chat = dict(json.loads(chat))
# TODO
if auth.user_id in chat["admins"]:
- chat.update({
- "title": chat_new.get("title", chat["title"]),
- "description": chat_new.get("description", chat["description"]),
- "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
- "admins": chat_new.get("admins", chat.get("admins") or []),
- "users": chat_new.get("users", chat["users"])
- })
+ chat.update(
+ {
+ "title": chat_new.get("title", chat["title"]),
+ "description": chat_new.get("description", chat["description"]),
+ "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
+ "admins": chat_new.get("admins", chat.get("admins") or []),
+ "users": chat_new.get("users", chat["users"]),
+ }
+ )
await redis.execute("SET", f"chats/{chat.id}", json.dumps(chat))
await redis.execute("COMMIT")
- return {
- "error": None,
- "chat": chat
- }
+ return {"error": None, "chat": chat}
@mutation.field("createChat")
@@ -79,10 +76,7 @@ async def create_chat(_, info, title="", members=[]):
print(chat)
break
if chat:
- return {
- "chat": chat,
- "error": "existed"
- }
+ return {"chat": chat, "error": "existed"}
chat_id = str(uuid.uuid4())
chat = {
@@ -92,7 +86,7 @@ async def create_chat(_, info, title="", members=[]):
"createdBy": auth.user_id,
"createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
"updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
- "admins": members if (len(members) == 2 and title == "") else []
+ "admins": members if (len(members) == 2 and title == "") else [],
}
for m in members:
@@ -100,10 +94,7 @@ async def create_chat(_, info, title="", members=[]):
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0))
await redis.execute("COMMIT")
- return {
- "error": None,
- "chat": chat
- }
+ return {"error": None, "chat": chat}
@mutation.field("deleteChat")
@@ -119,6 +110,4 @@ async def delete_chat(_, info, chat_id: str):
await redis.execute("SREM", "chats_by_user/" + str(auth.user_id), chat_id)
await redis.execute("COMMIT")
else:
- return {
- "error": "chat not exist"
- }
+ return {"error": "chat not exist"}
diff --git a/resolvers/inbox/load.py b/resolvers/inbox/load.py
index a0d41721..43f8a07c 100644
--- a/resolvers/inbox/load.py
+++ b/resolvers/inbox/load.py
@@ -1,28 +1,27 @@
import json
-# from datetime import datetime, timedelta, timezone
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.redis import redis
from base.orm import local_session
+from base.redis import redis
from base.resolvers import query
from orm.user import User
from resolvers.zine.profile import followed_authors
+
from .unread import get_unread_counter
+# from datetime import datetime, timedelta, timezone
+
async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
- ''' load :limit messages for :chat_id with :offset '''
+ '''load :limit messages for :chat_id with :offset'''
messages = []
message_ids = []
if ids:
message_ids += ids
try:
if limit:
- mids = await redis.lrange(f"chats/{chat_id}/message_ids",
- offset,
- offset + limit
- )
+ mids = await redis.lrange(f"chats/{chat_id}/message_ids", offset, offset + limit)
mids = [mid.decode("utf-8") for mid in mids]
message_ids += mids
except Exception as e:
@@ -46,12 +45,12 @@ async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
@query.field("loadChats")
@login_required
async def load_chats(_, info, limit: int = 50, offset: int = 0):
- """ load :limit chats of current user with :offset """
+ """load :limit chats of current user with :offset"""
auth: AuthCredentials = info.context["request"].auth
cids = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
if cids:
- cids = list(cids)[offset:offset + limit]
+ cids = list(cids)[offset : offset + limit]
if not cids:
print('[inbox.load] no chats were found')
cids = []
@@ -71,25 +70,24 @@ async def load_chats(_, info, limit: int = 50, offset: int = 0):
for uid in c["users"]:
a = session.query(User).where(User.id == uid).first()
if a:
- c['members'].append({
- "id": a.id,
- "slug": a.slug,
- "userpic": a.userpic,
- "name": a.name,
- "lastSeen": a.lastSeen,
- "online": a.id in onliners
- })
+ c['members'].append(
+ {
+ "id": a.id,
+ "slug": a.slug,
+ "userpic": a.userpic,
+ "name": a.name,
+ "lastSeen": a.lastSeen,
+ "online": a.id in onliners,
+ }
+ )
chats.append(c)
- return {
- "chats": chats,
- "error": None
- }
+ return {"chats": chats, "error": None}
@query.field("loadMessagesBy")
@login_required
async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
- ''' load :limit messages of :chat_id with :offset '''
+ '''load :limit messages of :chat_id with :offset'''
auth: AuthCredentials = info.context["request"].auth
userchats = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
@@ -103,23 +101,12 @@ async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
chat = await redis.execute("GET", f"chats/{by_chat}")
# print(chat)
if not chat:
- return {
- "messages": [],
- "error": "chat not exist"
- }
+ return {"messages": [], "error": "chat not exist"}
# everyone's messages in filtered chat
messages = await load_messages(by_chat, limit, offset)
- return {
- "messages": sorted(
- list(messages),
- key=lambda m: m['createdAt']
- ),
- "error": None
- }
+ return {"messages": sorted(list(messages), key=lambda m: m['createdAt']), "error": None}
else:
- return {
- "error": "Cannot access messages of this chat"
- }
+ return {"error": "Cannot access messages of this chat"}
@query.field("loadRecipients")
@@ -138,15 +125,14 @@ async def load_recipients(_, info, limit=50, offset=0):
chat_users += session.query(User).where(User.emailConfirmed).limit(limit).offset(offset)
members = []
for a in chat_users:
- members.append({
- "id": a.id,
- "slug": a.slug,
- "userpic": a.userpic,
- "name": a.name,
- "lastSeen": a.lastSeen,
- "online": a.id in onliners
- })
- return {
- "members": members,
- "error": None
- }
+ members.append(
+ {
+ "id": a.id,
+ "slug": a.slug,
+ "userpic": a.userpic,
+ "name": a.name,
+ "lastSeen": a.lastSeen,
+ "online": a.id in onliners,
+ }
+ )
+ return {"members": members, "error": None}
diff --git a/resolvers/inbox/messages.py b/resolvers/inbox/messages.py
index 56187edf..3d35105a 100644
--- a/resolvers/inbox/messages.py
+++ b/resolvers/inbox/messages.py
@@ -1,27 +1,27 @@
import asyncio
import json
-from typing import Any
from datetime import datetime, timezone
+from typing import Any
+
from graphql.type import GraphQLResolveInfo
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
-from services.following import FollowingManager, FollowingResult, Following
+from services.following import Following, FollowingManager, FollowingResult
from validations.inbox import Message
@mutation.field("createMessage")
@login_required
async def create_message(_, info, chat: str, body: str, replyTo=None):
- """ create message with :body for :chat_id replying to :replyTo optionally """
+ """create message with :body for :chat_id replying to :replyTo optionally"""
auth: AuthCredentials = info.context["request"].auth
chat = await redis.execute("GET", f"chats/{chat}")
if not chat:
- return {
- "error": "chat is not exist"
- }
+ return {"error": "chat is not exist"}
else:
chat = dict(json.loads(chat))
message_id = await redis.execute("GET", f"chats/{chat['id']}/next_message_id")
@@ -31,7 +31,7 @@ async def create_message(_, info, chat: str, body: str, replyTo=None):
"id": message_id,
"author": auth.user_id,
"body": body,
- "createdAt": int(datetime.now(tz=timezone.utc).timestamp())
+ "createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
}
if replyTo:
new_message['replyTo'] = replyTo
@@ -46,17 +46,12 @@ async def create_message(_, info, chat: str, body: str, replyTo=None):
users = chat["users"]
for user_slug in users:
- await redis.execute(
- "LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id)
- )
+ await redis.execute("LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id))
result = FollowingResult("NEW", 'chat', new_message)
await FollowingManager.push('chat', result)
- return {
- "message": new_message,
- "error": None
- }
+ return {"message": new_message, "error": None}
@mutation.field("updateMessage")
@@ -84,10 +79,7 @@ async def update_message(_, info, chat_id: str, message_id: int, body: str):
result = FollowingResult("UPDATED", 'chat', message)
await FollowingManager.push('chat', result)
- return {
- "message": message,
- "error": None
- }
+ return {"message": message, "error": None}
@mutation.field("deleteMessage")
@@ -137,6 +129,4 @@ async def mark_as_read(_, info, chat_id: str, messages: [int]):
for message_id in messages:
await redis.execute("LREM", f"chats/{chat_id}/unread/{auth.user_id}", 0, str(message_id))
- return {
- "error": None
- }
+ return {"error": None}
diff --git a/resolvers/inbox/search.py b/resolvers/inbox/search.py
index 1ca340e5..8a3f0c2d 100644
--- a/resolvers/inbox/search.py
+++ b/resolvers/inbox/search.py
@@ -1,10 +1,11 @@
import json
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timedelta, timezone
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
+from base.orm import local_session
from base.redis import redis
from base.resolvers import query
-from base.orm import local_session
from orm.user import AuthorFollower, User
from resolvers.inbox.load import load_messages
@@ -17,7 +18,7 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int =
auth: AuthCredentials = info.context["request"].auth
talk_before = await redis.execute("GET", f"/chats_by_user/{auth.user_id}")
if talk_before:
- talk_before = list(json.loads(talk_before))[offset:offset + limit]
+ talk_before = list(json.loads(talk_before))[offset : offset + limit]
for chat_id in talk_before:
members = await redis.execute("GET", f"/chats/{chat_id}/users")
if members:
@@ -31,23 +32,24 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int =
with local_session() as session:
# followings
- result += session.query(AuthorFollower.author).join(
- User, User.id == AuthorFollower.follower
- ).where(
- User.slug.startswith(query)
- ).offset(offset + len(result)).limit(more_amount)
+ result += (
+ session.query(AuthorFollower.author)
+ .join(User, User.id == AuthorFollower.follower)
+ .where(User.slug.startswith(query))
+ .offset(offset + len(result))
+ .limit(more_amount)
+ )
more_amount = limit
# followers
- result += session.query(AuthorFollower.follower).join(
- User, User.id == AuthorFollower.author
- ).where(
- User.slug.startswith(query)
- ).offset(offset + len(result)).limit(offset + len(result) + limit)
- return {
- "members": list(result),
- "error": None
- }
+ result += (
+ session.query(AuthorFollower.follower)
+ .join(User, User.id == AuthorFollower.author)
+ .where(User.slug.startswith(query))
+ .offset(offset + len(result))
+ .limit(offset + len(result) + limit)
+ )
+ return {"members": list(result), "error": None}
@query.field("searchMessages")
@@ -83,13 +85,12 @@ async def search_user_chats(by, messages, user_id: int, limit, offset):
days = by.get("days")
if days:
- messages.extend(filter(
- list(messages),
- key=lambda m: (
- datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by["days"])
+ messages.extend(
+ filter(
+ list(messages),
+ key=lambda m: (
+ datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by["days"])
+ ),
)
- ))
- return {
- "messages": messages,
- "error": None
- }
+ )
+ return {"messages": messages, "error": None}
diff --git a/resolvers/notifications.py b/resolvers/notifications.py
index 0cfc2244..98314d2e 100644
--- a/resolvers/notifications.py
+++ b/resolvers/notifications.py
@@ -1,9 +1,9 @@
-from sqlalchemy import select, desc, and_, update
+from sqlalchemy import and_, desc, select, update
-from auth.credentials import AuthCredentials
-from base.resolvers import query, mutation
from auth.authenticate import login_required
+from auth.credentials import AuthCredentials
from base.orm import local_session
+from base.resolvers import mutation, query
from orm import Notification
@@ -19,22 +19,23 @@ async def load_notifications(_, info, params=None):
limit = params.get('limit', 50)
offset = params.get('offset', 0)
- q = select(Notification).where(
- Notification.user == user_id
- ).order_by(desc(Notification.createdAt)).limit(limit).offset(offset)
+ q = (
+ select(Notification)
+ .where(Notification.user == user_id)
+ .order_by(desc(Notification.createdAt))
+ .limit(limit)
+ .offset(offset)
+ )
notifications = []
with local_session() as session:
- total_count = session.query(Notification).where(
- Notification.user == user_id
- ).count()
+ total_count = session.query(Notification).where(Notification.user == user_id).count()
- total_unread_count = session.query(Notification).where(
- and_(
- Notification.user == user_id,
- Notification.seen == False
- )
- ).count()
+ total_unread_count = (
+ session.query(Notification)
+ .where(and_(Notification.user == user_id, Notification.seen == False))
+ .count()
+ )
for [notification] in session.execute(q):
notification.type = notification.type.name
@@ -43,7 +44,7 @@ async def load_notifications(_, info, params=None):
return {
"notifications": notifications,
"totalCount": total_count,
- "totalUnreadCount": total_unread_count
+ "totalUnreadCount": total_unread_count,
}
@@ -54,9 +55,11 @@ async def mark_notification_as_read(_, info, notification_id: int):
user_id = auth.user_id
with local_session() as session:
- notification = session.query(Notification).where(
- and_(Notification.id == notification_id, Notification.user == user_id)
- ).one()
+ notification = (
+ session.query(Notification)
+ .where(and_(Notification.id == notification_id, Notification.user == user_id))
+ .one()
+ )
notification.seen = True
session.commit()
@@ -69,12 +72,11 @@ async def mark_all_notifications_as_read(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- statement = update(Notification).where(
- and_(
- Notification.user == user_id,
- Notification.seen == False
- )
- ).values(seen=True)
+ statement = (
+ update(Notification)
+ .where(and_(Notification.user == user_id, Notification.seen == False))
+ .values(seen=True)
+ )
with local_session() as session:
try:
diff --git a/resolvers/upload.py b/resolvers/upload.py
index 44c7b81c..3eee3358 100644
--- a/resolvers/upload.py
+++ b/resolvers/upload.py
@@ -2,6 +2,7 @@ import os
import shutil
import tempfile
import uuid
+
import boto3
from botocore.exceptions import BotoCoreError, ClientError
from starlette.responses import JSONResponse
@@ -25,10 +26,12 @@ async def upload_handler(request):
key = 'files/' + str(uuid.uuid4()) + file_extension
# Create an S3 client with Storj configuration
- s3 = boto3.client('s3',
- aws_access_key_id=STORJ_ACCESS_KEY,
- aws_secret_access_key=STORJ_SECRET_KEY,
- endpoint_url=STORJ_END_POINT)
+ s3 = boto3.client(
+ 's3',
+ aws_access_key_id=STORJ_ACCESS_KEY,
+ aws_secret_access_key=STORJ_SECRET_KEY,
+ endpoint_url=STORJ_END_POINT,
+ )
try:
# Save the uploaded file to a temporary file
@@ -39,9 +42,7 @@ async def upload_handler(request):
Filename=tmp_file.name,
Bucket=STORJ_BUCKET_NAME,
Key=key,
- ExtraArgs={
- "ContentType": file.content_type
- }
+ ExtraArgs={"ContentType": file.content_type},
)
url = 'https://' + CDN_DOMAIN + '/' + key
@@ -51,6 +52,3 @@ async def upload_handler(request):
except (BotoCoreError, ClientError) as e:
print(e)
return JSONResponse({'error': 'Failed to upload file'}, status_code=500)
-
-
-
diff --git a/resolvers/zine/following.py b/resolvers/zine/following.py
index 99481571..24935d5e 100644
--- a/resolvers/zine/following.py
+++ b/resolvers/zine/following.py
@@ -1,17 +1,20 @@
import asyncio
-from base.orm import local_session
-from base.resolvers import mutation
+
+from graphql.type import GraphQLResolveInfo
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
+from base.orm import local_session
+from base.resolvers import mutation
+from orm.shout import ShoutReactionsFollower
+from orm.topic import TopicFollower
+
# from resolvers.community import community_follow, community_unfollow
from orm.user import AuthorFollower
-from orm.topic import TopicFollower
-from orm.shout import ShoutReactionsFollower
from resolvers.zine.profile import author_follow, author_unfollow
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
from resolvers.zine.topics import topic_follow, topic_unfollow
from services.following import Following, FollowingManager, FollowingResult
-from graphql.type import GraphQLResolveInfo
@mutation.field("follow")
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 4619efa6..06f400fc 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -1,7 +1,7 @@
from datetime import datetime, timedelta, timezone
-from sqlalchemy.orm import joinedload, aliased
-from sqlalchemy.sql.expression import desc, asc, select, func, case, and_, text, nulls_last
+from sqlalchemy.orm import aliased, joinedload
+from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select, text
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
@@ -18,32 +18,32 @@ def add_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction).add_columns(
- func.sum(
- aliased_reaction.id
- ).label('reacted_stat'),
+ func.sum(aliased_reaction.id).label('reacted_stat'),
+ func.sum(case((aliased_reaction.kind == ReactionKind.COMMENT, 1), else_=0)).label(
+ 'commented_stat'
+ ),
func.sum(
case(
- (aliased_reaction.kind == ReactionKind.COMMENT, 1),
- else_=0
+ # do not count comments' reactions
+ (aliased_reaction.replyTo.is_not(None), 0),
+ (aliased_reaction.kind == ReactionKind.AGREE, 1),
+ (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
+ (aliased_reaction.kind == ReactionKind.PROOF, 1),
+ (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
+ (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
+ (aliased_reaction.kind == ReactionKind.REJECT, -1),
+ (aliased_reaction.kind == ReactionKind.LIKE, 1),
+ (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
+ else_=0,
)
- ).label('commented_stat'),
- func.sum(case(
- # do not count comments' reactions
- (aliased_reaction.replyTo.is_not(None), 0),
- (aliased_reaction.kind == ReactionKind.AGREE, 1),
- (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
- (aliased_reaction.kind == ReactionKind.PROOF, 1),
- (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
- (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
- (aliased_reaction.kind == ReactionKind.REJECT, -1),
- (aliased_reaction.kind == ReactionKind.LIKE, 1),
- (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
- else_=0)
).label('rating_stat'),
- func.max(case(
- (aliased_reaction.kind != ReactionKind.COMMENT, None),
- else_=aliased_reaction.createdAt
- )).label('last_comment'))
+ func.max(
+ case(
+ (aliased_reaction.kind != ReactionKind.COMMENT, None),
+ else_=aliased_reaction.createdAt,
+ )
+ ).label('last_comment'),
+ )
return q
@@ -87,27 +87,23 @@ async def load_shout(_, info, slug=None, shout_id=None):
q = add_stat_columns(q)
if slug is not None:
- q = q.filter(
- Shout.slug == slug
- )
+ q = q.filter(Shout.slug == slug)
if shout_id is not None:
- q = q.filter(
- Shout.id == shout_id
- )
+ q = q.filter(Shout.id == shout_id)
- q = q.filter(
- Shout.deletedAt.is_(None)
- ).group_by(Shout.id)
+ q = q.filter(Shout.deletedAt.is_(None)).group_by(Shout.id)
try:
- [shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute(q).first()
+ [shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute(
+ q
+ ).first()
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat
+ "rating": rating_stat,
}
for author_caption in session.query(ShoutAuthor).join(Shout).where(Shout.slug == slug):
@@ -142,14 +138,13 @@ async def load_shouts_by(_, info, options):
:return: Shout[]
"""
- q = select(Shout).options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- ).where(
- and_(
- Shout.deletedAt.is_(None),
- Shout.layout.is_not(None)
+ q = (
+ select(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
)
+ .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None)))
)
q = add_stat_columns(q)
@@ -169,13 +164,15 @@ async def load_shouts_by(_, info, options):
with local_session() as session:
shouts_map = {}
- for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
+ for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
+ q
+ ).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat
+ "rating": rating_stat,
}
shouts_map[shout.id] = shout
@@ -188,11 +185,13 @@ async def get_drafts(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- q = select(Shout).options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- ).where(
- and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id)
+ q = (
+ select(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ )
+ .where(and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id))
)
q = q.group_by(Shout.id)
@@ -211,24 +210,22 @@ async def get_my_feed(_, info, options):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- subquery = select(Shout.id).join(
- ShoutAuthor
- ).join(
- AuthorFollower, AuthorFollower.follower == user_id
- ).join(
- ShoutTopic
- ).join(
- TopicFollower, TopicFollower.follower == user_id
+ subquery = (
+ select(Shout.id)
+ .join(ShoutAuthor)
+ .join(AuthorFollower, AuthorFollower.follower == user_id)
+ .join(ShoutTopic)
+ .join(TopicFollower, TopicFollower.follower == user_id)
)
- q = select(Shout).options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- ).where(
- and_(
- Shout.publishedAt.is_not(None),
- Shout.deletedAt.is_(None),
- Shout.id.in_(subquery)
+ q = (
+ select(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ )
+ .where(
+ and_(Shout.publishedAt.is_not(None), Shout.deletedAt.is_(None), Shout.id.in_(subquery))
)
)
@@ -246,13 +243,15 @@ async def get_my_feed(_, info, options):
shouts = []
with local_session() as session:
shouts_map = {}
- for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
+ for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
+ q
+ ).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat
+ "rating": rating_stat,
}
shouts_map[shout.id] = shout
diff --git a/resolvers/zine/profile.py b/resolvers/zine/profile.py
index 552af43f..ecdc26c7 100644
--- a/resolvers/zine/profile.py
+++ b/resolvers/zine/profile.py
@@ -1,6 +1,7 @@
-from typing import List
from datetime import datetime, timedelta, timezone
-from sqlalchemy import and_, func, distinct, select, literal
+from typing import List
+
+from sqlalchemy import and_, distinct, func, literal, select
from sqlalchemy.orm import aliased, joinedload
from auth.authenticate import login_required
@@ -55,7 +56,7 @@ def add_stat(author, stat_columns):
"followers": followers_stat,
"followings": followings_stat,
"rating": rating_stat,
- "commented": commented_stat
+ "commented": commented_stat,
}
return author
@@ -119,10 +120,10 @@ async def user_followers(_, _info, slug) -> List[User]:
q = add_author_stat_columns(q)
aliased_user = aliased(User)
- q = q.join(AuthorFollower, AuthorFollower.follower == User.id).join(
- aliased_user, aliased_user.id == AuthorFollower.author
- ).where(
- aliased_user.slug == slug
+ q = (
+ q.join(AuthorFollower, AuthorFollower.follower == User.id)
+ .join(aliased_user, aliased_user.id == AuthorFollower.author)
+ .where(aliased_user.slug == slug)
)
return get_authors_from_query(q)
@@ -150,15 +151,10 @@ async def update_profile(_, info, profile):
with local_session() as session:
user = session.query(User).filter(User.id == user_id).one()
if not user:
- return {
- "error": "canoot find user"
- }
+ return {"error": "canoot find user"}
user.update(profile)
session.commit()
- return {
- "error": None,
- "author": user
- }
+ return {"error": None, "author": user}
@mutation.field("rateUser")
@@ -200,13 +196,10 @@ def author_follow(user_id, slug):
def author_unfollow(user_id, slug):
with local_session() as session:
flw = (
- session.query(
- AuthorFollower
- ).join(User, User.id == AuthorFollower.author).filter(
- and_(
- AuthorFollower.follower == user_id, User.slug == slug
- )
- ).first()
+ session.query(AuthorFollower)
+ .join(User, User.id == AuthorFollower.author)
+ .filter(and_(AuthorFollower.follower == user_id, User.slug == slug))
+ .first()
)
if flw:
session.delete(flw)
@@ -232,12 +225,11 @@ async def get_author(_, _info, slug):
[author] = get_authors_from_query(q)
with local_session() as session:
- comments_count = session.query(Reaction).where(
- and_(
- Reaction.createdBy == author.id,
- Reaction.kind == ReactionKind.COMMENT
- )
- ).count()
+ comments_count = (
+ session.query(Reaction)
+ .where(and_(Reaction.createdBy == author.id, Reaction.kind == ReactionKind.COMMENT))
+ .count()
+ )
author.stat["commented"] = comments_count
return author
@@ -260,9 +252,7 @@ async def load_authors_by(_, info, by, limit, offset):
days_before = datetime.now(tz=timezone.utc) - timedelta(days=by["createdAt"])
q = q.filter(User.createdAt > days_before)
- q = q.order_by(
- by.get("order", User.createdAt)
- ).limit(limit).offset(offset)
+ q = q.order_by(by.get("order", User.createdAt)).limit(limit).offset(offset)
return get_authors_from_query(q)
@@ -273,13 +263,13 @@ async def load_my_subscriptions(_, info):
auth = info.context["request"].auth
user_id = auth.user_id
- authors_query = select(User).join(AuthorFollower, AuthorFollower.author == User.id).where(
- AuthorFollower.follower == user_id
+ authors_query = (
+ select(User)
+ .join(AuthorFollower, AuthorFollower.author == User.id)
+ .where(AuthorFollower.follower == user_id)
)
- topics_query = select(Topic).join(TopicFollower).where(
- TopicFollower.follower == user_id
- )
+ topics_query = select(Topic).join(TopicFollower).where(TopicFollower.follower == user_id)
topics = []
authors = []
@@ -291,7 +281,4 @@ async def load_my_subscriptions(_, info):
for [topic] in session.execute(topics_query):
topics.append(topic)
- return {
- "topics": topics,
- "authors": authors
- }
+ return {"topics": topics, "authors": authors}
diff --git a/resolvers/zine/reactions.py b/resolvers/zine/reactions.py
index 1c132b69..0a37f6c3 100644
--- a/resolvers/zine/reactions.py
+++ b/resolvers/zine/reactions.py
@@ -1,5 +1,6 @@
from datetime import datetime, timedelta, timezone
-from sqlalchemy import and_, asc, desc, select, text, func, case
+
+from sqlalchemy import and_, asc, case, desc, func, select, text
from sqlalchemy.orm import aliased
from auth.authenticate import login_required
@@ -17,26 +18,22 @@ def add_reaction_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction, Reaction.id == aliased_reaction.replyTo).add_columns(
- func.sum(
- aliased_reaction.id
- ).label('reacted_stat'),
+ func.sum(aliased_reaction.id).label('reacted_stat'),
+ func.sum(case((aliased_reaction.body.is_not(None), 1), else_=0)).label('commented_stat'),
func.sum(
case(
- (aliased_reaction.body.is_not(None), 1),
- else_=0
+ (aliased_reaction.kind == ReactionKind.AGREE, 1),
+ (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
+ (aliased_reaction.kind == ReactionKind.PROOF, 1),
+ (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
+ (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
+ (aliased_reaction.kind == ReactionKind.REJECT, -1),
+ (aliased_reaction.kind == ReactionKind.LIKE, 1),
+ (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
+ else_=0,
)
- ).label('commented_stat'),
- func.sum(case(
- (aliased_reaction.kind == ReactionKind.AGREE, 1),
- (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
- (aliased_reaction.kind == ReactionKind.PROOF, 1),
- (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
- (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
- (aliased_reaction.kind == ReactionKind.REJECT, -1),
- (aliased_reaction.kind == ReactionKind.LIKE, 1),
- (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
- else_=0)
- ).label('rating_stat'))
+ ).label('rating_stat'),
+ )
return q
@@ -47,17 +44,19 @@ def reactions_follow(user_id, shout_id: int, auto=False):
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
- session.query(ShoutReactionsFollower).where(and_(
- ShoutReactionsFollower.follower == user_id,
- ShoutReactionsFollower.shout == shout.id,
- )).first()
+ session.query(ShoutReactionsFollower)
+ .where(
+ and_(
+ ShoutReactionsFollower.follower == user_id,
+ ShoutReactionsFollower.shout == shout.id,
+ )
+ )
+ .first()
)
if not following:
following = ShoutReactionsFollower.create(
- follower=user_id,
- shout=shout.id,
- auto=auto
+ follower=user_id, shout=shout.id, auto=auto
)
session.add(following)
session.commit()
@@ -72,10 +71,14 @@ def reactions_unfollow(user_id: int, shout_id: int):
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
- session.query(ShoutReactionsFollower).where(and_(
- ShoutReactionsFollower.follower == user_id,
- ShoutReactionsFollower.shout == shout.id
- )).first()
+ session.query(ShoutReactionsFollower)
+ .where(
+ and_(
+ ShoutReactionsFollower.follower == user_id,
+ ShoutReactionsFollower.shout == shout.id,
+ )
+ )
+ .first()
)
if following:
@@ -88,30 +91,31 @@ def reactions_unfollow(user_id: int, shout_id: int):
def is_published_author(session, user_id):
- ''' checks if user has at least one publication '''
- return session.query(
- Shout
- ).where(
- Shout.authors.contains(user_id)
- ).filter(
- and_(
- Shout.publishedAt.is_not(None),
- Shout.deletedAt.is_(None)
- )
- ).count() > 0
+ '''checks if user has at least one publication'''
+ return (
+ session.query(Shout)
+ .where(Shout.authors.contains(user_id))
+ .filter(and_(Shout.publishedAt.is_not(None), Shout.deletedAt.is_(None)))
+ .count()
+ > 0
+ )
def check_to_publish(session, user_id, reaction):
- ''' set shout to public if publicated approvers amount > 4 '''
+ '''set shout to public if publicated approvers amount > 4'''
if not reaction.replyTo and reaction.kind in [
ReactionKind.ACCEPT,
ReactionKind.LIKE,
- ReactionKind.PROOF
+ ReactionKind.PROOF,
]:
if is_published_author(user_id):
# now count how many approvers are voted already
- approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
- approvers = [user_id, ]
+ approvers_reactions = (
+ session.query(Reaction).where(Reaction.shout == reaction.shout).all()
+ )
+ approvers = [
+ user_id,
+ ]
for ar in approvers_reactions:
a = ar.createdBy
if is_published_author(session, a):
@@ -122,21 +126,17 @@ def check_to_publish(session, user_id, reaction):
def check_to_hide(session, user_id, reaction):
- ''' hides any shout if 20% of reactions are negative '''
+ '''hides any shout if 20% of reactions are negative'''
if not reaction.replyTo and reaction.kind in [
ReactionKind.REJECT,
ReactionKind.DISLIKE,
- ReactionKind.DISPROOF
+ ReactionKind.DISPROOF,
]:
# if is_published_author(user):
approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
rejects = 0
for r in approvers_reactions:
- if r.kind in [
- ReactionKind.REJECT,
- ReactionKind.DISLIKE,
- ReactionKind.DISPROOF
- ]:
+ if r.kind in [ReactionKind.REJECT, ReactionKind.DISLIKE, ReactionKind.DISPROOF]:
rejects += 1
if len(approvers_reactions) / rejects < 5:
return True
@@ -168,31 +168,40 @@ async def create_reaction(_, info, reaction):
shout = session.query(Shout).where(Shout.id == reaction["shout"]).one()
author = session.query(User).where(User.id == auth.user_id).one()
- if reaction["kind"] in [
- ReactionKind.DISLIKE.name,
- ReactionKind.LIKE.name
- ]:
- existing_reaction = session.query(Reaction).where(
- and_(
- Reaction.shout == reaction["shout"],
- Reaction.createdBy == auth.user_id,
- Reaction.kind == reaction["kind"],
- Reaction.replyTo == reaction.get("replyTo")
+ if reaction["kind"] in [ReactionKind.DISLIKE.name, ReactionKind.LIKE.name]:
+ existing_reaction = (
+ session.query(Reaction)
+ .where(
+ and_(
+ Reaction.shout == reaction["shout"],
+ Reaction.createdBy == auth.user_id,
+ Reaction.kind == reaction["kind"],
+ Reaction.replyTo == reaction.get("replyTo"),
+ )
)
- ).first()
+ .first()
+ )
if existing_reaction is not None:
raise OperationNotAllowed("You can't vote twice")
- opposite_reaction_kind = ReactionKind.DISLIKE if reaction["kind"] == ReactionKind.LIKE.name else ReactionKind.LIKE
- opposite_reaction = session.query(Reaction).where(
+ opposite_reaction_kind = (
+ ReactionKind.DISLIKE
+ if reaction["kind"] == ReactionKind.LIKE.name
+ else ReactionKind.LIKE
+ )
+ opposite_reaction = (
+ session.query(Reaction)
+ .where(
and_(
Reaction.shout == reaction["shout"],
Reaction.createdBy == auth.user_id,
Reaction.kind == opposite_reaction_kind,
- Reaction.replyTo == reaction.get("replyTo")
+ Reaction.replyTo == reaction.get("replyTo"),
)
- ).first()
+ )
+ .first()
+ )
if opposite_reaction is not None:
session.delete(opposite_reaction)
@@ -235,11 +244,7 @@ async def create_reaction(_, info, reaction):
except Exception as e:
print(f"[resolvers.reactions] error on reactions autofollowing: {e}")
- rdict['stat'] = {
- "commented": 0,
- "reacted": 0,
- "rating": 0
- }
+ rdict['stat'] = {"commented": 0, "reacted": 0, "rating": 0}
return {"reaction": rdict}
@@ -269,11 +274,7 @@ async def update_reaction(_, info, id, reaction={}):
if reaction.get("range"):
r.range = reaction.get("range")
session.commit()
- r.stat = {
- "commented": commented_stat,
- "reacted": reacted_stat,
- "rating": rating_stat
- }
+ r.stat = {"commented": commented_stat, "reacted": reacted_stat, "rating": rating_stat}
return {"reaction": r}
@@ -290,17 +291,12 @@ async def delete_reaction(_, info, id):
if r.createdBy != auth.user_id:
return {"error": "access denied"}
- if r.kind in [
- ReactionKind.LIKE,
- ReactionKind.DISLIKE
- ]:
+ if r.kind in [ReactionKind.LIKE, ReactionKind.DISLIKE]:
session.delete(r)
else:
r.deletedAt = datetime.now(tz=timezone.utc)
session.commit()
- return {
- "reaction": r
- }
+ return {"reaction": r}
@query.field("loadReactionsBy")
@@ -321,12 +317,10 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
:return: Reaction[]
"""
- q = select(
- Reaction, User, Shout
- ).join(
- User, Reaction.createdBy == User.id
- ).join(
- Shout, Reaction.shout == Shout.id
+ q = (
+ select(Reaction, User, Shout)
+ .join(User, Reaction.createdBy == User.id)
+ .join(Shout, Reaction.shout == Shout.id)
)
if by.get("shout"):
@@ -354,11 +348,7 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
order_way = asc if by.get("sort", "").startswith("-") else desc
order_field = by.get("sort", "").replace('-', '') or Reaction.createdAt
- q = q.group_by(
- Reaction.id, User.id, Shout.id
- ).order_by(
- order_way(order_field)
- )
+ q = q.group_by(Reaction.id, User.id, Shout.id).order_by(order_way(order_field))
q = add_reaction_stat_columns(q)
@@ -367,13 +357,15 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
reactions = []
with local_session() as session:
- for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute(q):
+ for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute(
+ q
+ ):
reaction.createdBy = user
reaction.shout = shout
reaction.stat = {
"rating": rating_stat,
"commented": commented_stat,
- "reacted": reacted_stat
+ "reacted": reacted_stat,
}
reaction.kind = reaction.kind.name
diff --git a/resolvers/zine/topics.py b/resolvers/zine/topics.py
index f354a7b4..72ecf9ac 100644
--- a/resolvers/zine/topics.py
+++ b/resolvers/zine/topics.py
@@ -1,24 +1,25 @@
-from sqlalchemy import and_, select, distinct, func
+from sqlalchemy import and_, distinct, func, select
from sqlalchemy.orm import aliased
from auth.authenticate import login_required
from base.orm import local_session
from base.resolvers import mutation, query
-from orm.shout import ShoutTopic, ShoutAuthor
-from orm.topic import Topic, TopicFollower
from orm import User
+from orm.shout import ShoutAuthor, ShoutTopic
+from orm.topic import Topic, TopicFollower
def add_topic_stat_columns(q):
aliased_shout_author = aliased(ShoutAuthor)
aliased_topic_follower = aliased(TopicFollower)
- q = q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic).add_columns(
- func.count(distinct(ShoutTopic.shout)).label('shouts_stat')
- ).outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout).add_columns(
- func.count(distinct(aliased_shout_author.user)).label('authors_stat')
- ).outerjoin(aliased_topic_follower).add_columns(
- func.count(distinct(aliased_topic_follower.follower)).label('followers_stat')
+ q = (
+ q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic)
+ .add_columns(func.count(distinct(ShoutTopic.shout)).label('shouts_stat'))
+ .outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout)
+ .add_columns(func.count(distinct(aliased_shout_author.user)).label('authors_stat'))
+ .outerjoin(aliased_topic_follower)
+ .add_columns(func.count(distinct(aliased_topic_follower.follower)).label('followers_stat'))
)
q = q.group_by(Topic.id)
@@ -28,11 +29,7 @@ def add_topic_stat_columns(q):
def add_stat(topic, stat_columns):
[shouts_stat, authors_stat, followers_stat] = stat_columns
- topic.stat = {
- "shouts": shouts_stat,
- "authors": authors_stat,
- "followers": followers_stat
- }
+ topic.stat = {"shouts": shouts_stat, "authors": authors_stat, "followers": followers_stat}
return topic
@@ -133,12 +130,10 @@ def topic_unfollow(user_id, slug):
try:
with local_session() as session:
sub = (
- session.query(TopicFollower).join(Topic).filter(
- and_(
- TopicFollower.follower == user_id,
- Topic.slug == slug
- )
- ).first()
+ session.query(TopicFollower)
+ .join(Topic)
+ .filter(and_(TopicFollower.follower == user_id, Topic.slug == slug))
+ .first()
)
if sub:
session.delete(sub)
diff --git a/server.py b/server.py
index 753c60ae..48186da0 100644
--- a/server.py
+++ b/server.py
@@ -1,8 +1,9 @@
-import sys
import os
+import sys
+
import uvicorn
-from settings import PORT, DEV_SERVER_PID_FILE_NAME
+from settings import DEV_SERVER_PID_FILE_NAME, PORT
def exception_handler(exception_type, exception, traceback, debug_hook=sys.excepthook):
@@ -16,41 +17,30 @@ log_settings = {
'default': {
'()': 'uvicorn.logging.DefaultFormatter',
'fmt': '%(levelprefix)s %(message)s',
- 'use_colors': None
+ 'use_colors': None,
},
'access': {
'()': 'uvicorn.logging.AccessFormatter',
- 'fmt': '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
- }
+ 'fmt': '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
+ },
},
'handlers': {
'default': {
'formatter': 'default',
'class': 'logging.StreamHandler',
- 'stream': 'ext://sys.stderr'
+ 'stream': 'ext://sys.stderr',
},
'access': {
'formatter': 'access',
'class': 'logging.StreamHandler',
- 'stream': 'ext://sys.stdout'
- }
+ 'stream': 'ext://sys.stdout',
+ },
},
'loggers': {
- 'uvicorn': {
- 'handlers': ['default'],
- 'level': 'INFO'
- },
- 'uvicorn.error': {
- 'level': 'INFO',
- 'handlers': ['default'],
- 'propagate': True
- },
- 'uvicorn.access': {
- 'handlers': ['access'],
- 'level': 'INFO',
- 'propagate': False
- }
- }
+ 'uvicorn': {'handlers': ['default'], 'level': 'INFO'},
+ 'uvicorn.error': {'level': 'INFO', 'handlers': ['default'], 'propagate': True},
+ 'uvicorn.access': {'handlers': ['access'], 'level': 'INFO', 'propagate': False},
+ },
}
local_headers = [
@@ -86,24 +76,20 @@ if __name__ == "__main__":
# log_config=log_settings,
log_level=None,
access_log=True,
- reload=want_reload
+ reload=want_reload,
) # , ssl_keyfile="discours.key", ssl_certfile="discours.crt")
elif x == "migrate":
from migration import process
+
print("MODE: MIGRATE")
process()
elif x == "bson":
from migration.bson2json import json_tables
+
print("MODE: BSON")
json_tables()
else:
sys.excepthook = exception_handler
- uvicorn.run(
- "main:app",
- host="0.0.0.0",
- port=PORT,
- proxy_headers=True,
- server_header=True
- )
+ uvicorn.run("main:app", host="0.0.0.0", port=PORT, proxy_headers=True, server_header=True)
diff --git a/services/following.py b/services/following.py
index 8410eb2d..8261d696 100644
--- a/services/following.py
+++ b/services/following.py
@@ -18,12 +18,7 @@ class Following:
class FollowingManager:
lock = asyncio.Lock()
- data = {
- 'author': [],
- 'topic': [],
- 'shout': [],
- 'chat': []
- }
+ data = {'author': [], 'topic': [], 'shout': [], 'chat': []}
@staticmethod
async def register(kind, uid):
diff --git a/services/main.py b/services/main.py
index 10301b86..98fddcc1 100644
--- a/services/main.py
+++ b/services/main.py
@@ -1,6 +1,6 @@
+from base.orm import local_session
from services.search import SearchService
from services.stat.viewed import ViewedStorage
-from base.orm import local_session
async def storages_init():
diff --git a/services/notifications/notification_service.py b/services/notifications/notification_service.py
index 7e92aa95..8467e836 100644
--- a/services/notifications/notification_service.py
+++ b/services/notifications/notification_service.py
@@ -5,26 +5,18 @@ from datetime import datetime, timezone
from sqlalchemy import and_
from base.orm import local_session
-from orm import Reaction, Shout, Notification, User
+from orm import Notification, Reaction, Shout, User
from orm.notification import NotificationType
from orm.reaction import ReactionKind
from services.notifications.sse import connection_manager
def shout_to_shout_data(shout):
- return {
- "title": shout.title,
- "slug": shout.slug
- }
+ return {"title": shout.title, "slug": shout.slug}
def user_to_user_data(user):
- return {
- "id": user.id,
- "name": user.name,
- "slug": user.slug,
- "userpic": user.userpic
- }
+ return {"id": user.id, "name": user.name, "slug": user.slug, "userpic": user.userpic}
def update_prev_notification(notification, user, reaction):
@@ -57,34 +49,45 @@ class NewReactionNotificator:
if reaction.kind == ReactionKind.COMMENT:
parent_reaction = None
if reaction.replyTo:
- parent_reaction = session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
+ parent_reaction = (
+ session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
+ )
if parent_reaction.createdBy != reaction.createdBy:
- prev_new_reply_notification = session.query(Notification).where(
- and_(
- Notification.user == shout.createdBy,
- Notification.type == NotificationType.NEW_REPLY,
- Notification.shout == shout.id,
- Notification.reaction == parent_reaction.id,
- Notification.seen == False
+ prev_new_reply_notification = (
+ session.query(Notification)
+ .where(
+ and_(
+ Notification.user == shout.createdBy,
+ Notification.type == NotificationType.NEW_REPLY,
+ Notification.shout == shout.id,
+ Notification.reaction == parent_reaction.id,
+ Notification.seen == False,
+ )
)
- ).first()
+ .first()
+ )
if prev_new_reply_notification:
update_prev_notification(prev_new_reply_notification, user, reaction)
else:
- reply_notification_data = json.dumps({
- "shout": shout_to_shout_data(shout),
- "users": [user_to_user_data(user)],
- "reactionIds": [reaction.id]
- }, ensure_ascii=False)
+ reply_notification_data = json.dumps(
+ {
+ "shout": shout_to_shout_data(shout),
+ "users": [user_to_user_data(user)],
+ "reactionIds": [reaction.id],
+ },
+ ensure_ascii=False,
+ )
- reply_notification = Notification.create(**{
- "user": parent_reaction.createdBy,
- "type": NotificationType.NEW_REPLY,
- "shout": shout.id,
- "reaction": parent_reaction.id,
- "data": reply_notification_data
- })
+ reply_notification = Notification.create(
+ **{
+ "user": parent_reaction.createdBy,
+ "type": NotificationType.NEW_REPLY,
+ "shout": shout.id,
+ "reaction": parent_reaction.id,
+ "data": reply_notification_data,
+ }
+ )
session.add(reply_notification)
@@ -93,30 +96,39 @@ class NewReactionNotificator:
if reaction.createdBy != shout.createdBy and (
parent_reaction is None or parent_reaction.createdBy != shout.createdBy
):
- prev_new_comment_notification = session.query(Notification).where(
- and_(
- Notification.user == shout.createdBy,
- Notification.type == NotificationType.NEW_COMMENT,
- Notification.shout == shout.id,
- Notification.seen == False
+ prev_new_comment_notification = (
+ session.query(Notification)
+ .where(
+ and_(
+ Notification.user == shout.createdBy,
+ Notification.type == NotificationType.NEW_COMMENT,
+ Notification.shout == shout.id,
+ Notification.seen == False,
+ )
)
- ).first()
+ .first()
+ )
if prev_new_comment_notification:
update_prev_notification(prev_new_comment_notification, user, reaction)
else:
- notification_data_string = json.dumps({
- "shout": shout_to_shout_data(shout),
- "users": [user_to_user_data(user)],
- "reactionIds": [reaction.id]
- }, ensure_ascii=False)
+ notification_data_string = json.dumps(
+ {
+ "shout": shout_to_shout_data(shout),
+ "users": [user_to_user_data(user)],
+ "reactionIds": [reaction.id],
+ },
+ ensure_ascii=False,
+ )
- author_notification = Notification.create(**{
- "user": shout.createdBy,
- "type": NotificationType.NEW_COMMENT,
- "shout": shout.id,
- "data": notification_data_string
- })
+ author_notification = Notification.create(
+ **{
+ "user": shout.createdBy,
+ "type": NotificationType.NEW_COMMENT,
+ "shout": shout.id,
+ "data": notification_data_string,
+ }
+ )
session.add(author_notification)
diff --git a/services/notifications/sse.py b/services/notifications/sse.py
index 085dbde0..55cae575 100644
--- a/services/notifications/sse.py
+++ b/services/notifications/sse.py
@@ -1,8 +1,8 @@
+import asyncio
import json
from sse_starlette.sse import EventSourceResponse
from starlette.requests import Request
-import asyncio
class ConnectionManager:
@@ -28,9 +28,7 @@ class ConnectionManager:
return
for connection in self.connections_by_user_id[user_id]:
- data = {
- "type": "newNotifications"
- }
+ data = {"type": "newNotifications"}
data_string = json.dumps(data, ensure_ascii=False)
await connection.put(data_string)
diff --git a/services/search.py b/services/search.py
index 834e5bf7..d1748cdd 100644
--- a/services/search.py
+++ b/services/search.py
@@ -1,5 +1,6 @@
import asyncio
import json
+
from base.redis import redis
from orm.shout import Shout
from resolvers.zine.load import load_shouts_by
@@ -20,12 +21,7 @@ class SearchService:
cached = await redis.execute("GET", text)
if not cached:
async with SearchService.lock:
- options = {
- "title": text,
- "body": text,
- "limit": limit,
- "offset": offset
- }
+ options = {"title": text, "body": text, "limit": limit, "offset": offset}
payload = await load_shouts_by(None, None, options)
await redis.execute("SET", text, json.dumps(payload))
return payload
diff --git a/services/stat/viewed.py b/services/stat/viewed.py
index 905ade43..c9f9a6db 100644
--- a/services/stat/viewed.py
+++ b/services/stat/viewed.py
@@ -1,6 +1,6 @@
import asyncio
import time
-from datetime import timedelta, timezone, datetime
+from datetime import datetime, timedelta, timezone
from os import environ, path
from ssl import create_default_context
@@ -9,10 +9,11 @@ from gql.transport.aiohttp import AIOHTTPTransport
from sqlalchemy import func
from base.orm import local_session
-from orm import User, Topic
-from orm.shout import ShoutTopic, Shout
+from orm import Topic, User
+from orm.shout import Shout, ShoutTopic
-load_facts = gql("""
+load_facts = gql(
+ """
query getDomains {
domains {
id
@@ -25,9 +26,11 @@ query getDomains {
}
}
}
-""")
+"""
+)
-load_pages = gql("""
+load_pages = gql(
+ """
query getDomains {
domains {
title
@@ -41,7 +44,8 @@ query getDomains {
}
}
}
-""")
+"""
+)
schema_str = open(path.dirname(__file__) + '/ackee.graphql').read()
token = environ.get("ACKEE_TOKEN", "")
@@ -50,10 +54,8 @@ def create_client(headers=None, schema=None):
return Client(
schema=schema,
transport=AIOHTTPTransport(
- url="https://ackee.discours.io/api",
- ssl=create_default_context(),
- headers=headers
- )
+ url="https://ackee.discours.io/api", ssl=create_default_context(), headers=headers
+ ),
)
@@ -71,13 +73,13 @@ class ViewedStorage:
@staticmethod
async def init():
- """ graphql client connection using permanent token """
+ """graphql client connection using permanent token"""
self = ViewedStorage
async with self.lock:
if token:
- self.client = create_client({
- "Authorization": "Bearer %s" % str(token)
- }, schema=schema_str)
+ self.client = create_client(
+ {"Authorization": "Bearer %s" % str(token)}, schema=schema_str
+ )
print("[stat.viewed] * authorized permanentely by ackee.discours.io: %s" % token)
else:
print("[stat.viewed] * please set ACKEE_TOKEN")
@@ -85,7 +87,7 @@ class ViewedStorage:
@staticmethod
async def update_pages():
- """ query all the pages from ackee sorted by views count """
+ """query all the pages from ackee sorted by views count"""
print("[stat.viewed] ⎧ updating ackee pages data ---")
start = time.time()
self = ViewedStorage
@@ -118,7 +120,7 @@ class ViewedStorage:
# unused yet
@staticmethod
async def get_shout(shout_slug):
- """ getting shout views metric by slug """
+ """getting shout views metric by slug"""
self = ViewedStorage
async with self.lock:
shout_views = self.by_shouts.get(shout_slug)
@@ -136,7 +138,7 @@ class ViewedStorage:
@staticmethod
async def get_topic(topic_slug):
- """ getting topic views value summed """
+ """getting topic views value summed"""
self = ViewedStorage
topic_views = 0
async with self.lock:
@@ -146,18 +148,22 @@ class ViewedStorage:
@staticmethod
def update_topics(session, shout_slug):
- """ updates topics counters by shout slug """
+ """updates topics counters by shout slug"""
self = ViewedStorage
- for [shout_topic, topic] in session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(
- Shout.slug == shout_slug
- ).all():
+ for [shout_topic, topic] in (
+ session.query(ShoutTopic, Topic)
+ .join(Topic)
+ .join(Shout)
+ .where(Shout.slug == shout_slug)
+ .all()
+ ):
if not self.by_topics.get(topic.slug):
self.by_topics[topic.slug] = {}
self.by_topics[topic.slug][shout_slug] = self.by_shouts[shout_slug]
@staticmethod
async def increment(shout_slug, amount=1, viewer='ackee'):
- """ the only way to change views counter """
+ """the only way to change views counter"""
self = ViewedStorage
async with self.lock:
# TODO optimize, currenty we execute 1 DB transaction per shout
@@ -185,7 +191,7 @@ class ViewedStorage:
@staticmethod
async def worker():
- """ async task worker """
+ """async task worker"""
failed = 0
self = ViewedStorage
if self.disabled:
@@ -205,9 +211,10 @@ class ViewedStorage:
if failed == 0:
when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
t = format(when.astimezone().isoformat())
- print("[stat.viewed] ⎩ next update: %s" % (
- t.split("T")[0] + " " + t.split("T")[1].split(".")[0]
- ))
+ print(
+ "[stat.viewed] ⎩ next update: %s"
+ % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
+ )
await asyncio.sleep(self.period)
else:
await asyncio.sleep(10)
diff --git a/settings.py b/settings.py
index 270b4551..bd096081 100644
--- a/settings.py
+++ b/settings.py
@@ -3,8 +3,9 @@ from os import environ
PORT = 8080
DB_URL = (
- environ.get("DATABASE_URL") or environ.get("DB_URL") or
- "postgresql://postgres@localhost:5432/discoursio"
+ environ.get("DATABASE_URL")
+ or environ.get("DB_URL")
+ or "postgresql://postgres@localhost:5432/discoursio"
)
JWT_ALGORITHM = "HS256"
JWT_SECRET_KEY = environ.get("JWT_SECRET_KEY") or "8f1bd7696ffb482d8486dfbc6e7d16dd-secret-key"
diff --git a/validations/auth.py b/validations/auth.py
index 216d7dcb..73b83079 100644
--- a/validations/auth.py
+++ b/validations/auth.py
@@ -1,4 +1,5 @@
from typing import Optional, Text
+
from pydantic import BaseModel
diff --git a/validations/inbox.py b/validations/inbox.py
index d03cca05..58645dd9 100644
--- a/validations/inbox.py
+++ b/validations/inbox.py
@@ -1,4 +1,5 @@
-from typing import Optional, Text, List
+from typing import List, Optional, Text
+
from pydantic import BaseModel
From 54457cb9c58e07a1d2c18d6be231658644372225 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Thu, 26 Oct 2023 19:57:17 +0200
Subject: [PATCH 05/27] lint wip
---
auth/identity.py | 1 -
main.py | 1 -
resolvers/auth.py | 8 +-------
resolvers/zine/following.py | 1 -
4 files changed, 1 insertion(+), 10 deletions(-)
diff --git a/auth/identity.py b/auth/identity.py
index cc1bf3c8..2db9772a 100644
--- a/auth/identity.py
+++ b/auth/identity.py
@@ -7,7 +7,6 @@ from sqlalchemy import or_
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
-
# from base.exceptions import InvalidPassword, InvalidToken
from base.orm import local_session
from orm import User
diff --git a/main.py b/main.py
index 3f839ab5..d3af4224 100644
--- a/main.py
+++ b/main.py
@@ -22,7 +22,6 @@ from services.main import storages_init
from services.notifications.notification_service import notification_service
from services.notifications.sse import sse_subscribe_handler
from services.stat.viewed import ViewedStorage
-
# from services.zine.gittask import GitTask
from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY
diff --git a/resolvers/auth.py b/resolvers/auth.py
index c28898e3..3e66ef3b 100644
--- a/resolvers/auth.py
+++ b/resolvers/auth.py
@@ -14,13 +14,7 @@ from auth.email import send_auth_email
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
-from base.exceptions import (
- BaseHttpException,
- InvalidPassword,
- InvalidToken,
- ObjectNotExist,
- Unauthorized,
-)
+from base.exceptions import BaseHttpException, InvalidPassword, InvalidToken, ObjectNotExist, Unauthorized
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Role, User
diff --git a/resolvers/zine/following.py b/resolvers/zine/following.py
index 24935d5e..9f08848e 100644
--- a/resolvers/zine/following.py
+++ b/resolvers/zine/following.py
@@ -8,7 +8,6 @@ from base.orm import local_session
from base.resolvers import mutation
from orm.shout import ShoutReactionsFollower
from orm.topic import TopicFollower
-
# from resolvers.community import community_follow, community_unfollow
from orm.user import AuthorFollower
from resolvers.zine.profile import author_follow, author_unfollow
From 1c49780cd4c5c8f9d6085c4e9dd75b059f41259b Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Thu, 26 Oct 2023 20:05:32 +0200
Subject: [PATCH 06/27] lint wip
---
.pre-commit-config.yaml | 8 ++++----
auth/identity.py | 1 +
main.py | 1 +
resolvers/auth.py | 8 +++++++-
resolvers/zine/following.py | 1 +
5 files changed, 14 insertions(+), 5 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 74bd1516..c25b29a2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -22,10 +22,10 @@ repos:
- id: end-of-file-fixer
- id: trailing-whitespace
- - repo: https://github.com/timothycrosley/isort
- rev: 5.12.0
- hooks:
- - id: isort
+# - repo: https://github.com/timothycrosley/isort
+# rev: 5.12.0
+# hooks:
+# - id: isort
- repo: https://github.com/ambv/black
rev: 23.9.1
diff --git a/auth/identity.py b/auth/identity.py
index 2db9772a..cc1bf3c8 100644
--- a/auth/identity.py
+++ b/auth/identity.py
@@ -7,6 +7,7 @@ from sqlalchemy import or_
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
+
# from base.exceptions import InvalidPassword, InvalidToken
from base.orm import local_session
from orm import User
diff --git a/main.py b/main.py
index d3af4224..3f839ab5 100644
--- a/main.py
+++ b/main.py
@@ -22,6 +22,7 @@ from services.main import storages_init
from services.notifications.notification_service import notification_service
from services.notifications.sse import sse_subscribe_handler
from services.stat.viewed import ViewedStorage
+
# from services.zine.gittask import GitTask
from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY
diff --git a/resolvers/auth.py b/resolvers/auth.py
index 3e66ef3b..c28898e3 100644
--- a/resolvers/auth.py
+++ b/resolvers/auth.py
@@ -14,7 +14,13 @@ from auth.email import send_auth_email
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
-from base.exceptions import BaseHttpException, InvalidPassword, InvalidToken, ObjectNotExist, Unauthorized
+from base.exceptions import (
+ BaseHttpException,
+ InvalidPassword,
+ InvalidToken,
+ ObjectNotExist,
+ Unauthorized,
+)
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Role, User
diff --git a/resolvers/zine/following.py b/resolvers/zine/following.py
index 9f08848e..24935d5e 100644
--- a/resolvers/zine/following.py
+++ b/resolvers/zine/following.py
@@ -8,6 +8,7 @@ from base.orm import local_session
from base.resolvers import mutation
from orm.shout import ShoutReactionsFollower
from orm.topic import TopicFollower
+
# from resolvers.community import community_follow, community_unfollow
from orm.user import AuthorFollower
from resolvers.zine.profile import author_follow, author_unfollow
From c2cc428abe58bbf50215b8727151406d6acd13e4 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Thu, 26 Oct 2023 22:38:31 +0200
Subject: [PATCH 07/27] lint
---
.flake8 | 6 +-
.pre-commit-config.yaml | 5 -
ai/preprocess.py | 21 ++-
alembic/env.py | 9 +-
alembic/versions/fe943b098418_init_alembic.py | 6 +-
auth/authenticate.py | 31 +++--
auth/credentials.py | 3 +-
auth/email.py | 16 +--
auth/identity.py | 15 +--
auth/jwtcodec.py | 21 ++-
auth/oauth.py | 5 +-
auth/tokenstorage.py | 5 +-
base/orm.py | 8 +-
base/redis.py | 4 +-
main.py | 27 ++--
migration/__init__.py | 36 ++---
migration/bson2json.py | 9 +-
migration/export.py | 9 +-
migration/extract.py | 125 +++++++++---------
migration/html2text/__init__.py | 16 +--
migration/html2text/cli.py | 4 +-
migration/html2text/utils.py | 4 +-
migration/tables/comments.py | 20 ++-
migration/tables/content_items.py | 51 ++++---
migration/tables/remarks.py | 22 +--
migration/tables/users.py | 15 +--
orm/collection.py | 6 +-
orm/community.py | 8 +-
orm/notification.py | 4 +-
orm/rbac.py | 27 ++--
orm/reaction.py | 4 +-
orm/shout.py | 17 ++-
orm/topic.py | 6 +-
orm/user.py | 11 +-
requirements-dev.txt | 1 +
requirements.txt | 3 -
resolvers/__init__.py | 35 -----
resolvers/auth.py | 35 +++--
resolvers/create/editor.py | 27 ++--
resolvers/create/migrate.py | 20 +--
resolvers/inbox/chats.py | 16 +--
resolvers/inbox/load.py | 32 ++---
resolvers/inbox/messages.py | 29 ++--
resolvers/inbox/search.py | 14 +-
resolvers/notifications.py | 11 +-
resolvers/upload.py | 32 ++---
resolvers/zine/following.py | 42 +++---
resolvers/zine/load.py | 60 +++++----
resolvers/zine/profile.py | 35 +++--
resolvers/zine/reactions.py | 59 +++++----
resolvers/zine/topics.py | 17 ++-
server.py | 62 +++++----
services/following.py | 8 +-
services/main.py | 8 +-
.../notifications/notification_service.py | 26 ++--
services/notifications/sse.py | 6 +-
services/search.py | 15 ++-
services/stat/viewed.py | 28 ++--
services/zine/gittask.py | 6 +-
settings.py | 2 +-
setup.cfg | 7 +-
setup.cfg.bak | 39 ++++++
validations/auth.py | 3 +-
validations/inbox.py | 3 +-
64 files changed, 631 insertions(+), 626 deletions(-)
delete mode 100644 resolvers/__init__.py
mode change 100755 => 100644 setup.cfg
create mode 100644 setup.cfg.bak
diff --git a/.flake8 b/.flake8
index e82de95a..523cb30f 100644
--- a/.flake8
+++ b/.flake8
@@ -1,6 +1,6 @@
[flake8]
-ignore = E203,W504,W191,W503
+ignore = E203
exclude = .git,__pycache__,orm/rbac.py
-max-complexity = 10
-max-line-length = 108
+max-complexity = 15
+max-line-length = 100
indent-string = ' '
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c25b29a2..42569413 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -17,7 +17,6 @@ repos:
- id: check-docstring-first
- id: check-json
- id: check-merge-conflict
- - id: check-toml
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
@@ -33,12 +32,8 @@ repos:
- id: black
args:
- --line-length=100
- - --skip-string-normalization
- repo: https://github.com/PyCQA/flake8
rev: 6.1.0
hooks:
- id: flake8
- args:
- - --max-line-length=100
- - --disable=protected-access
diff --git a/ai/preprocess.py b/ai/preprocess.py
index 6cc5ae5a..82d06f71 100644
--- a/ai/preprocess.py
+++ b/ai/preprocess.py
@@ -1,29 +1,28 @@
-import re
-from string import punctuation
-
-import nltk
from bs4 import BeautifulSoup
from nltk.corpus import stopwords
from pymystem3 import Mystem
-from transformers import BertTokenizer
+from string import punctuation
+
+import nltk
+import re
nltk.download("stopwords")
def get_clear_text(text):
- soup = BeautifulSoup(text, 'html.parser')
+ soup = BeautifulSoup(text, "html.parser")
# extract the plain text from the HTML document without tags
- clear_text = ''
+ clear_text = ""
for tag in soup.find_all():
- clear_text += tag.string or ''
+ clear_text += tag.string or ""
- clear_text = re.sub(pattern='[\u202F\u00A0\n]+', repl=' ', string=clear_text)
+ clear_text = re.sub(pattern="[\u202F\u00A0\n]+", repl=" ", string=clear_text)
# only words
- clear_text = re.sub(pattern='[^A-ZА-ЯЁ -]', repl='', string=clear_text, flags=re.IGNORECASE)
+ clear_text = re.sub(pattern="[^A-ZА-ЯЁ -]", repl="", string=clear_text, flags=re.IGNORECASE)
- clear_text = re.sub(pattern='\s+', repl=' ', string=clear_text)
+ clear_text = re.sub(pattern=r"\s+", repl=" ", string=clear_text)
clear_text = clear_text.lower()
diff --git a/alembic/env.py b/alembic/env.py
index 91012c34..58e3e200 100644
--- a/alembic/env.py
+++ b/alembic/env.py
@@ -1,9 +1,8 @@
-from logging.config import fileConfig
-
-from sqlalchemy import engine_from_config, pool
-
from alembic import context
+from base.orm import Base
+from logging.config import fileConfig
from settings import DB_URL
+from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@@ -17,8 +16,6 @@ config.set_section_option(config.config_ini_section, "DB_URL", DB_URL)
if config.config_file_name is not None:
fileConfig(config.config_file_name)
-from base.orm import Base
-
target_metadata = [Base.metadata]
# other values from the config, defined by the needs of env.py,
diff --git a/alembic/versions/fe943b098418_init_alembic.py b/alembic/versions/fe943b098418_init_alembic.py
index 6f62301f..52796fea 100644
--- a/alembic/versions/fe943b098418_init_alembic.py
+++ b/alembic/versions/fe943b098418_init_alembic.py
@@ -7,12 +7,12 @@ Create Date: 2023-08-19 01:37:57.031933
"""
from typing import Sequence, Union
-import sqlalchemy as sa
+# import sqlalchemy as sa
-from alembic import op
+# from alembic import op
# revision identifiers, used by Alembic.
-revision: str = 'fe943b098418'
+revision: str = "fe943b098418"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
diff --git a/auth/authenticate.py b/auth/authenticate.py
index 7792766d..9e4c93fc 100644
--- a/auth/authenticate.py
+++ b/auth/authenticate.py
@@ -1,17 +1,15 @@
-from functools import wraps
-from typing import Optional, Tuple
-
-from graphql.type import GraphQLResolveInfo
-from sqlalchemy.orm import exc, joinedload
-from starlette.authentication import AuthenticationBackend
-from starlette.requests import HTTPConnection
-
from auth.credentials import AuthCredentials, AuthUser
from auth.tokenstorage import SessionToken
from base.exceptions import OperationNotAllowed
from base.orm import local_session
+from functools import wraps
+from graphql.type import GraphQLResolveInfo
from orm.user import Role, User
from settings import SESSION_TOKEN_HEADER
+from sqlalchemy.orm import exc, joinedload
+from starlette.authentication import AuthenticationBackend
+from starlette.requests import HTTPConnection
+from typing import Optional, Tuple
class JWTAuthenticate(AuthenticationBackend):
@@ -19,16 +17,16 @@ class JWTAuthenticate(AuthenticationBackend):
self, request: HTTPConnection
) -> Optional[Tuple[AuthCredentials, AuthUser]]:
if SESSION_TOKEN_HEADER not in request.headers:
- return AuthCredentials(scopes={}), AuthUser(user_id=None, username='')
+ return AuthCredentials(scopes={}), AuthUser(user_id=None, username="")
token = request.headers.get(SESSION_TOKEN_HEADER)
if not token:
print("[auth.authenticate] no token in header %s" % SESSION_TOKEN_HEADER)
return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(
- user_id=None, username=''
+ user_id=None, username=""
)
- if len(token.split('.')) > 1:
+ if len(token.split(".")) > 1:
payload = await SessionToken.verify(token)
with local_session() as session:
@@ -47,20 +45,21 @@ class JWTAuthenticate(AuthenticationBackend):
return (
AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
- AuthUser(user_id=user.id, username=''),
+ AuthUser(user_id=user.id, username=""),
)
except exc.NoResultFound:
pass
- return AuthCredentials(scopes={}, error_message=str('Invalid token')), AuthUser(
- user_id=None, username=''
+ return AuthCredentials(scopes={}, error_message=str("Invalid token")), AuthUser(
+ user_id=None, username=""
)
def login_required(func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
- # print('[auth.authenticate] login required for %r with info %r' % (func, info)) # debug only
+ # debug only
+ # print('[auth.authenticate] login required for %r with info %r' % (func, info))
auth: AuthCredentials = info.context["request"].auth
# print(auth)
if not auth or not auth.logged_in:
@@ -75,7 +74,7 @@ def permission_required(resource, operation, func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
print(
- '[auth.authenticate] permission_required for %r with info %r' % (func, info)
+ "[auth.authenticate] permission_required for %r with info %r" % (func, info)
) # debug only
auth: AuthCredentials = info.context["request"].auth
if not auth.logged_in:
diff --git a/auth/credentials.py b/auth/credentials.py
index 63a1d161..856c2374 100644
--- a/auth/credentials.py
+++ b/auth/credentials.py
@@ -1,6 +1,5 @@
-from typing import List, Optional, Text
-
from pydantic import BaseModel
+from typing import List, Optional, Text
# from base.exceptions import Unauthorized
diff --git a/auth/email.py b/auth/email.py
index ca8b2bc4..faa64725 100644
--- a/auth/email.py
+++ b/auth/email.py
@@ -1,17 +1,17 @@
-import requests
-
from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN
-api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or 'discours.io')
-noreply = "discours.io " % (MAILGUN_DOMAIN or 'discours.io')
+import requests
+
+api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or "discours.io")
+noreply = "discours.io " % (MAILGUN_DOMAIN or "discours.io")
lang_subject = {"ru": "Подтверждение почты", "en": "Confirm email"}
async def send_auth_email(user, token, lang="ru", template="email_confirmation"):
try:
to = "%s <%s>" % (user.name, user.email)
- if lang not in ['ru', 'en']:
- lang = 'ru'
+ if lang not in ["ru", "en"]:
+ lang = "ru"
subject = lang_subject.get(lang, lang_subject["en"])
template = template + "_" + lang
payload = {
@@ -19,9 +19,9 @@ async def send_auth_email(user, token, lang="ru", template="email_confirmation")
"to": to,
"subject": subject,
"template": template,
- "h:X-Mailgun-Variables": "{ \"token\": \"%s\" }" % token,
+ "h:X-Mailgun-Variables": '{ "token": "%s" }' % token,
}
- print('[auth.email] payload: %r' % payload)
+ print("[auth.email] payload: %r" % payload)
# debug
# print('http://localhost:3000/?modal=auth&mode=confirm-email&token=%s' % token)
response = requests.post(api_url, auth=("api", MAILGUN_API_KEY), data=payload)
diff --git a/auth/identity.py b/auth/identity.py
index cc1bf3c8..6e89079f 100644
--- a/auth/identity.py
+++ b/auth/identity.py
@@ -1,16 +1,14 @@
-from binascii import hexlify
-from hashlib import sha256
-
-from jwt import DecodeError, ExpiredSignatureError
-from passlib.hash import bcrypt
-from sqlalchemy import or_
-
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
# from base.exceptions import InvalidPassword, InvalidToken
from base.orm import local_session
+from binascii import hexlify
+from hashlib import sha256
+from jwt import DecodeError, ExpiredSignatureError
from orm import User
+from passlib.hash import bcrypt
+from sqlalchemy import or_
from validations.auth import AuthInput
@@ -35,6 +33,7 @@ class Password:
Verify that password hash is equal to specified hash. Hash format:
$2a$10$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm
+ # noqa: W605
\__/\/ \____________________/\_____________________________/
| | Salt Hash
| Cost
@@ -84,7 +83,7 @@ class Identity:
@staticmethod
async def onetime(token: str) -> User:
try:
- print('[auth.identity] using one time token')
+ print("[auth.identity] using one time token")
payload = JWTCodec.decode(token)
if not await TokenStorage.exist(f"{payload.user_id}-{payload.username}-{token}"):
# raise InvalidToken("Login token has expired, please login again")
diff --git a/auth/jwtcodec.py b/auth/jwtcodec.py
index 8fc12d27..870ed540 100644
--- a/auth/jwtcodec.py
+++ b/auth/jwtcodec.py
@@ -1,11 +1,10 @@
-from datetime import datetime, timezone
-
-import jwt
-
from base.exceptions import ExpiredToken, InvalidToken
+from datetime import datetime, timezone
from settings import JWT_ALGORITHM, JWT_SECRET_KEY
from validations.auth import AuthInput, TokenPayload
+import jwt
+
class JWTCodec:
@staticmethod
@@ -20,7 +19,7 @@ class JWTCodec:
try:
return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
except Exception as e:
- print('[auth.jwtcodec] JWT encode error %r' % e)
+ print("[auth.jwtcodec] JWT encode error %r" % e)
@staticmethod
def decode(token: str, verify_exp: bool = True) -> TokenPayload:
@@ -41,12 +40,12 @@ class JWTCodec:
# print('[auth.jwtcodec] debug token %r' % r)
return r
except jwt.InvalidIssuedAtError:
- print('[auth.jwtcodec] invalid issued at: %r' % payload)
- raise ExpiredToken('check token issued time')
+ print("[auth.jwtcodec] invalid issued at: %r" % payload)
+ raise ExpiredToken("check token issued time")
except jwt.ExpiredSignatureError:
- print('[auth.jwtcodec] expired signature %r' % payload)
- raise ExpiredToken('check token lifetime')
+ print("[auth.jwtcodec] expired signature %r" % payload)
+ raise ExpiredToken("check token lifetime")
except jwt.InvalidTokenError:
- raise InvalidToken('token is not valid')
+ raise InvalidToken("token is not valid")
except jwt.InvalidSignatureError:
- raise InvalidToken('token is not valid')
+ raise InvalidToken("token is not valid")
diff --git a/auth/oauth.py b/auth/oauth.py
index 02f56ff5..89695c72 100644
--- a/auth/oauth.py
+++ b/auth/oauth.py
@@ -1,9 +1,8 @@
-from authlib.integrations.starlette_client import OAuth
-from starlette.responses import RedirectResponse
-
from auth.identity import Identity
from auth.tokenstorage import TokenStorage
+from authlib.integrations.starlette_client import OAuth
from settings import FRONTEND_URL, OAUTH_CLIENTS
+from starlette.responses import RedirectResponse
oauth = OAuth()
diff --git a/auth/tokenstorage.py b/auth/tokenstorage.py
index b5a5bc39..79a1a9b6 100644
--- a/auth/tokenstorage.py
+++ b/auth/tokenstorage.py
@@ -1,7 +1,6 @@
-from datetime import datetime, timedelta, timezone
-
from auth.jwtcodec import JWTCodec
from base.redis import redis
+from datetime import datetime, timedelta, timezone
from settings import ONETIME_TOKEN_LIFE_SPAN, SESSION_TOKEN_LIFE_SPAN
from validations.auth import AuthInput
@@ -35,7 +34,7 @@ class SessionToken:
class TokenStorage:
@staticmethod
async def get(token_key):
- print('[tokenstorage.get] ' + token_key)
+ print("[tokenstorage.get] " + token_key)
# 2041-user@domain.zn-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoyMDQxLCJ1c2VybmFtZSI6ImFudG9uLnJld2luK3Rlc3QtbG9hZGNoYXRAZ21haWwuY29tIiwiZXhwIjoxNjcxNzgwNjE2LCJpYXQiOjE2NjkxODg2MTYsImlzcyI6ImRpc2NvdXJzIn0.Nml4oV6iMjMmc6xwM7lTKEZJKBXvJFEIZ-Up1C1rITQ
return await redis.execute("GET", token_key)
diff --git a/base/orm.py b/base/orm.py
index 02105f51..0ebb8de7 100644
--- a/base/orm.py
+++ b/base/orm.py
@@ -1,11 +1,9 @@
-from typing import Any, Callable, Dict, Generic, TypeVar
-
-from sqlalchemy import Column, Integer, create_engine
+from settings import DB_URL
+from sqlalchemy import Column, create_engine, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
-
-from settings import DB_URL
+from typing import Any, Callable, Dict, Generic, TypeVar
engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
diff --git a/base/redis.py b/base/redis.py
index d5d4babd..52a49caa 100644
--- a/base/redis.py
+++ b/base/redis.py
@@ -1,7 +1,5 @@
-from asyncio import sleep
-
from aioredis import from_url
-
+from asyncio import sleep
from settings import REDIS_URL
diff --git a/main.py b/main.py
index 3f839ab5..8c4a7670 100644
--- a/main.py
+++ b/main.py
@@ -1,21 +1,12 @@
-import asyncio
-import os
-from importlib import import_module
-from os.path import exists
-
from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
-from starlette.applications import Starlette
-from starlette.middleware import Middleware
-from starlette.middleware.authentication import AuthenticationMiddleware
-from starlette.middleware.sessions import SessionMiddleware
-from starlette.routing import Route
-
from auth.authenticate import JWTAuthenticate
from auth.oauth import oauth_authorize, oauth_login
from base.redis import redis
from base.resolvers import resolvers
+from importlib import import_module
from orm import init_tables
+from os.path import exists
from resolvers.auth import confirm_email_handler
from resolvers.upload import upload_handler
from services.main import storages_init
@@ -25,6 +16,14 @@ from services.stat.viewed import ViewedStorage
# from services.zine.gittask import GitTask
from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY
+from starlette.applications import Starlette
+from starlette.middleware import Middleware
+from starlette.middleware.authentication import AuthenticationMiddleware
+from starlette.middleware.sessions import SessionMiddleware
+from starlette.routing import Route
+
+import asyncio
+import os
import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers) # type: ignore
@@ -51,7 +50,7 @@ async def start_up():
sentry_sdk.init(SENTRY_DSN)
except Exception as e:
- print('[sentry] init error')
+ print("[sentry] init error")
print(e)
@@ -60,7 +59,7 @@ async def dev_start_up():
await redis.connect()
return
else:
- with open(DEV_SERVER_PID_FILE_NAME, 'w', encoding='utf-8') as f:
+ with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
f.write(str(os.getpid()))
await start_up()
@@ -75,7 +74,7 @@ routes = [
Route("/oauth/{provider}", endpoint=oauth_login),
Route("/oauth-authorize", endpoint=oauth_authorize),
Route("/confirm/{token}", endpoint=confirm_email_handler),
- Route("/upload", endpoint=upload_handler, methods=['POST']),
+ Route("/upload", endpoint=upload_handler, methods=["POST"]),
Route("/subscribe/{user_id}", endpoint=sse_subscribe_handler),
]
diff --git a/migration/__init__.py b/migration/__init__.py
index 17cc5ffd..bf1ba8d8 100644
--- a/migration/__init__.py
+++ b/migration/__init__.py
@@ -1,18 +1,12 @@
""" cmd managed migration """
-import asyncio
-import gc
-import json
-import sys
from datetime import datetime, timezone
-
-import bs4
-
from migration.export import export_mdx
from migration.tables.comments import migrate as migrateComment
from migration.tables.comments import migrate_2stage as migrateComment_2stage
from migration.tables.content_items import get_shout_slug
from migration.tables.content_items import migrate as migrateShout
-from migration.tables.remarks import migrate as migrateRemark
+
+# from migration.tables.remarks import migrate as migrateRemark
from migration.tables.topics import migrate as migrateTopic
from migration.tables.users import migrate as migrateUser
from migration.tables.users import migrate_2stage as migrateUser_2stage
@@ -20,6 +14,12 @@ from migration.tables.users import post_migrate as users_post_migrate
from orm import init_tables
from orm.reaction import Reaction
+import asyncio
+import bs4
+import gc
+import json
+import sys
+
TODAY = datetime.strftime(datetime.now(tz=timezone.utc), "%Y%m%d")
OLD_DATE = "2016-03-05 22:22:00.350000"
@@ -111,7 +111,7 @@ async def shouts_handle(storage, args):
# print main counter
counter += 1
print(
- '[migration] shouts_handle %d: %s @%s'
+ "[migration] shouts_handle %d: %s @%s"
% ((counter + 1), shout_dict["slug"], author["slug"])
)
@@ -132,13 +132,13 @@ async def shouts_handle(storage, args):
print("[migration] " + str(anonymous_author) + " authored by @anonymous")
-async def remarks_handle(storage):
- print("[migration] comments")
- c = 0
- for entry_remark in storage["remarks"]["data"]:
- remark = await migrateRemark(entry_remark, storage)
- c += 1
- print("[migration] " + str(c) + " remarks migrated")
+# async def remarks_handle(storage):
+# print("[migration] comments")
+# c = 0
+# for entry_remark in storage["remarks"]["data"]:
+# remark = await migrateRemark(entry_remark, storage)
+# c += 1
+# print("[migration] " + str(c) + " remarks migrated")
async def comments_handle(storage):
@@ -149,9 +149,9 @@ async def comments_handle(storage):
for oldcomment in storage["reactions"]["data"]:
if not oldcomment.get("deleted"):
reaction = await migrateComment(oldcomment, storage)
- if type(reaction) == str:
+ if isinstance(reaction, str):
missed_shouts[reaction] = oldcomment
- elif type(reaction) == Reaction:
+ elif isinstance(reaction, Reaction):
reaction = reaction.dict()
rid = reaction["id"]
oid = reaction["oid"]
diff --git a/migration/bson2json.py b/migration/bson2json.py
index cff33b28..66507791 100644
--- a/migration/bson2json.py
+++ b/migration/bson2json.py
@@ -1,11 +1,10 @@
+from .utils import DateTimeEncoder
+
+import bson
import gc
import json
import os
-import bson
-
-from .utils import DateTimeEncoder
-
def json_tables():
print("[migration] unpack dump/discours/*.bson to migration/data/*.json")
@@ -19,7 +18,7 @@ def json_tables():
"remarks": [],
}
for table in data.keys():
- print('[migration] bson2json for ' + table)
+ print("[migration] bson2json for " + table)
gc.collect()
lc = []
bs = open("dump/discours/" + table + ".bson", "rb").read()
diff --git a/migration/export.py b/migration/export.py
index 42004ee3..4105a220 100644
--- a/migration/export.py
+++ b/migration/export.py
@@ -1,11 +1,10 @@
-import json
-import os
+from .extract import extract_html, extract_media
+from .utils import DateTimeEncoder
from datetime import datetime, timezone
import frontmatter
-
-from .extract import extract_html, extract_media
-from .utils import DateTimeEncoder
+import json
+import os
OLD_DATE = "2016-03-05 22:22:00.350000"
EXPORT_DEST = "../discoursio-web/data/"
diff --git a/migration/extract.py b/migration/extract.py
index 511e68ed..eca8f8d0 100644
--- a/migration/extract.py
+++ b/migration/extract.py
@@ -1,9 +1,11 @@
+from bs4 import BeautifulSoup
+
import base64
import os
import re
-import uuid
-from bs4 import BeautifulSoup
+# import uuid
+
TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
contentDir = os.path.join(
@@ -26,40 +28,40 @@ def replace_tooltips(body):
return newbody
-def extract_footnotes(body, shout_dict):
- parts = body.split("&&&")
- lll = len(parts)
- newparts = list(parts)
- placed = False
- if lll & 1:
- if lll > 1:
- i = 1
- print("[extract] found %d footnotes in body" % (lll - 1))
- for part in parts[1:]:
- if i & 1:
- placed = True
- if 'a class="footnote-url" href=' in part:
- print("[extract] footnote: " + part)
- fn = 'a class="footnote-url" href="'
- exxtracted_link = part.split(fn, 1)[1].split('"', 1)[0]
- extracted_body = part.split(fn, 1)[1].split('>', 1)[1].split('', 1)[0]
- print("[extract] footnote link: " + extracted_link)
- with local_session() as session:
- Reaction.create(
- {
- "shout": shout_dict['id'],
- "kind": ReactionKind.FOOTNOTE,
- "body": extracted_body,
- "range": str(body.index(fn + link) - len('<'))
- + ':'
- + str(body.index(extracted_body) + len('')),
- }
- )
- newparts[i] = "ℹ️"
- else:
- newparts[i] = part
- i += 1
- return ("".join(newparts), placed)
+# def extract_footnotes(body, shout_dict):
+# parts = body.split("&&&")
+# lll = len(parts)
+# newparts = list(parts)
+# placed = False
+# if lll & 1:
+# if lll > 1:
+# i = 1
+# print("[extract] found %d footnotes in body" % (lll - 1))
+# for part in parts[1:]:
+# if i & 1:
+# placed = True
+# if 'a class="footnote-url" href=' in part:
+# print("[extract] footnote: " + part)
+# fn = 'a class="footnote-url" href="'
+# # exxtracted_link = part.split(fn, 1)[1].split('"', 1)[0]
+# extracted_body = part.split(fn, 1)[1].split(">", 1)[1].split("", 1)[0]
+# print("[extract] footnote link: " + extracted_link)
+# with local_session() as session:
+# Reaction.create(
+# {
+# "shout": shout_dict["id"],
+# "kind": ReactionKind.FOOTNOTE,
+# "body": extracted_body,
+# "range": str(body.index(fn + link) - len("<"))
+# + ":"
+# + str(body.index(extracted_body) + len("")),
+# }
+# )
+# newparts[i] = "ℹ️"
+# else:
+# newparts[i] = part
+# i += 1
+# return ("".join(newparts), placed)
def place_tooltips(body):
@@ -228,7 +230,6 @@ di = "data:image"
def extract_md_images(body, prefix):
- newbody = ""
body = (
body.replace("\n! [](" + di, "\n 
.replace("\n[](" + di, "\n
@@ -236,10 +237,10 @@ def extract_md_images(body, prefix):
)
parts = body.split(di)
if len(parts) > 1:
- newbody = extract_dataimages(parts, prefix)
+ new_body = extract_dataimages(parts, prefix)
else:
- newbody = body
- return newbody
+ new_body = body
+ return new_body
def cleanup_md(body):
@@ -262,28 +263,28 @@ def cleanup_md(body):
return newbody
-def extract_md(body, shout_dict=None):
- newbody = body
- if newbody:
- newbody = cleanup_md(newbody)
- if not newbody:
- raise Exception("cleanup error")
-
- if shout_dict:
- uid = shout_dict['id'] or uuid.uuid4()
- newbody = extract_md_images(newbody, uid)
- if not newbody:
- raise Exception("extract_images error")
-
- newbody, placed = extract_footnotes(body, shout_dict)
- if not newbody:
- raise Exception("extract_footnotes error")
-
- return newbody
+# def extract_md(body, shout_dict=None):
+# newbody = body
+# if newbody:
+# newbody = cleanup_md(newbody)
+# if not newbody:
+# raise Exception("cleanup error")
+#
+# if shout_dict:
+# uid = shout_dict["id"] or uuid.uuid4()
+# newbody = extract_md_images(newbody, uid)
+# if not newbody:
+# raise Exception("extract_images error")
+#
+# newbody, placed = extract_footnotes(body, shout_dict)
+# if not newbody:
+# raise Exception("extract_footnotes error")
+#
+# return newbody
def extract_media(entry):
- '''normalized media extraction method'''
+ """normalized media extraction method"""
# media [ { title pic url body } ]}
kind = entry.get("type")
if not kind:
@@ -398,16 +399,14 @@ def cleanup_html(body: str) -> str:
return new_body
-def extract_html(entry, shout_id=None, cleanup=False):
- body_orig = (entry.get("body") or "").replace('\(', '(').replace('\)', ')')
+def extract_html(entry, cleanup=False):
+ body_orig = (entry.get("body") or "").replace(r"\(", "(").replace(r"\)", ")")
if cleanup:
# we do that before bs parsing to catch the invalid html
body_clean = cleanup_html(body_orig)
if body_clean != body_orig:
print(f"[migration] html cleaned for slug {entry.get('slug', None)}")
body_orig = body_clean
- if shout_id:
- extract_footnotes(body_orig, shout_id)
body_html = str(BeautifulSoup(body_orig, features="html.parser"))
if cleanup:
# we do that after bs parsing because it can add dummy tags
diff --git a/migration/html2text/__init__.py b/migration/html2text/__init__.py
index 6b87f297..c99afc59 100644
--- a/migration/html2text/__init__.py
+++ b/migration/html2text/__init__.py
@@ -1,13 +1,5 @@
"""html2text: Turn HTML into equivalent Markdown-structured text."""
-import html.entities
-import html.parser
-import re
-import string
-import urllib.parse as urlparse
-from textwrap import wrap
-from typing import Dict, List, Optional, Tuple, Union
-
from . import config
from .elements import AnchorElement, ListElement
from .typing import OutCallback
@@ -26,6 +18,14 @@ from .utils import (
skipwrap,
unifiable_n,
)
+from textwrap import wrap
+from typing import Dict, List, Optional, Tuple, Union
+
+import html.entities
+import html.parser
+import re
+import string
+import urllib.parse as urlparse
__version__ = (2020, 1, 16)
diff --git a/migration/html2text/cli.py b/migration/html2text/cli.py
index 62e0738f..f6cf3c57 100644
--- a/migration/html2text/cli.py
+++ b/migration/html2text/cli.py
@@ -1,8 +1,8 @@
+from . import __version__, config, HTML2Text
+
import argparse
import sys
-from . import HTML2Text, __version__, config
-
# noinspection DuplicatedCode
def main() -> None:
diff --git a/migration/html2text/utils.py b/migration/html2text/utils.py
index fd6a16c2..545bbd17 100644
--- a/migration/html2text/utils.py
+++ b/migration/html2text/utils.py
@@ -1,7 +1,7 @@
-import html.entities
+from . import config
from typing import Dict, List, Optional
-from . import config
+import html.entities
unifiable_n = {
html.entities.name2codepoint[k]: v for k, v in config.UNIFIABLE.items() if k != "nbsp"
diff --git a/migration/tables/comments.py b/migration/tables/comments.py
index 092850c8..13d2809d 100644
--- a/migration/tables/comments.py
+++ b/migration/tables/comments.py
@@ -1,8 +1,6 @@
-from datetime import datetime, timezone
-
-from dateutil.parser import parse as date_parse
-
from base.orm import local_session
+from datetime import datetime, timezone
+from dateutil.parser import parse as date_parse
from migration.html2text import html2text
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower
@@ -30,12 +28,12 @@ def auto_followers(session, topics, reaction_dict):
tf = (
session.query(TopicFollower)
.where(TopicFollower.follower == reaction_dict["createdBy"])
- .filter(TopicFollower.topic == t['id'])
+ .filter(TopicFollower.topic == t["id"])
.first()
)
if not tf:
topic_following = TopicFollower.create(
- follower=reaction_dict["createdBy"], topic=t['id'], auto=True
+ follower=reaction_dict["createdBy"], topic=t["id"], auto=True
)
session.add(topic_following)
@@ -57,13 +55,13 @@ def migrate_ratings(session, entry, reaction_dict):
rr = Reaction.create(**re_reaction_dict)
following2 = (
session.query(ShoutReactionsFollower)
- .where(ShoutReactionsFollower.follower == re_reaction_dict['createdBy'])
+ .where(ShoutReactionsFollower.follower == re_reaction_dict["createdBy"])
.filter(ShoutReactionsFollower.shout == rr.shout)
.first()
)
if not following2:
following2 = ShoutReactionsFollower.create(
- follower=re_reaction_dict['createdBy'], shout=rr.shout, auto=True
+ follower=re_reaction_dict["createdBy"], shout=rr.shout, auto=True
)
session.add(following2)
session.add(rr)
@@ -160,9 +158,9 @@ async def migrate(entry, storage):
def migrate_2stage(old_comment, idmap):
- if old_comment.get('body'):
- new_id = idmap.get(old_comment.get('oid'))
- new_id = idmap.get(old_comment.get('_id'))
+ if old_comment.get("body"):
+ new_id = idmap.get(old_comment.get("oid"))
+ new_id = idmap.get(old_comment.get("_id"))
if new_id:
new_replyto_id = None
old_replyto_id = old_comment.get("replyTo")
diff --git a/migration/tables/content_items.py b/migration/tables/content_items.py
index 92a97c24..053a8a97 100644
--- a/migration/tables/content_items.py
+++ b/migration/tables/content_items.py
@@ -1,18 +1,17 @@
-import json
-import re
-from datetime import datetime, timezone
-
-from dateutil.parser import parse as date_parse
-from sqlalchemy.exc import IntegrityError
-from transliterate import translit
-
from base.orm import local_session
+from datetime import datetime, timezone
+from dateutil.parser import parse as date_parse
from migration.extract import extract_html, extract_media
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from orm.user import User
from services.stat.viewed import ViewedStorage
+from sqlalchemy.exc import IntegrityError
+from transliterate import translit
+
+import json
+import re
OLD_DATE = "2016-03-05 22:22:00.350000"
ts = datetime.now(tz=timezone.utc)
@@ -35,7 +34,7 @@ def get_shout_slug(entry):
slug = friend.get("slug", "")
if slug:
break
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
return slug
@@ -43,27 +42,27 @@ def create_author_from_app(app):
user = None
userdata = None
# check if email is used
- if app['email']:
+ if app["email"]:
with local_session() as session:
- user = session.query(User).where(User.email == app['email']).first()
+ user = session.query(User).where(User.email == app["email"]).first()
if not user:
# print('[migration] app %r' % app)
- name = app.get('name')
+ name = app.get("name")
if name:
slug = translit(name, "ru", reversed=True).lower()
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
- print('[migration] created slug %s' % slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ print("[migration] created slug %s" % slug)
# check if slug is used
if slug:
user = session.query(User).where(User.slug == slug).first()
# get slug from email
if user:
- slug = app['email'].split('@')[0]
+ slug = app["email"].split("@")[0]
user = session.query(User).where(User.slug == slug).first()
# one more try
if user:
- slug += '-author'
+ slug += "-author"
user = session.query(User).where(User.slug == slug).first()
# create user with application data
@@ -81,7 +80,7 @@ def create_author_from_app(app):
user = User.create(**userdata)
session.add(user)
session.commit()
- userdata['id'] = user.id
+ userdata["id"] = user.id
userdata = user.dict()
return userdata
@@ -119,14 +118,14 @@ async def get_user(entry, storage):
elif user_oid:
userdata = storage["users"]["by_oid"].get(user_oid)
if not userdata:
- print('no userdata by oid, anonymous')
+ print("no userdata by oid, anonymous")
userdata = anondict
print(app)
# cleanup slug
if userdata:
slug = userdata.get("slug", "")
if slug:
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
userdata["slug"] = slug
else:
userdata = anondict
@@ -160,7 +159,7 @@ async def migrate(entry, storage):
}
# main topic patch
- r['mainTopic'] = r['topics'][0]
+ r["mainTopic"] = r["topics"][0]
# published author auto-confirm
if entry.get("published"):
@@ -183,7 +182,7 @@ async def migrate(entry, storage):
shout_dict["oid"] = entry.get("_id", "")
shout = await create_shout(shout_dict)
except IntegrityError as e:
- print('[migration] create_shout integrity error', e)
+ print("[migration] create_shout integrity error", e)
shout = await resolve_create_shout(shout_dict)
except Exception as e:
raise Exception(e)
@@ -202,7 +201,7 @@ async def migrate(entry, storage):
# shout views
await ViewedStorage.increment(
- shout_dict["slug"], amount=entry.get("views", 1), viewer='old-discours'
+ shout_dict["slug"], amount=entry.get("views", 1), viewer="old-discours"
)
# del shout_dict['ratings']
@@ -240,7 +239,7 @@ async def add_topics_follower(entry, storage, user):
session.add(tf)
session.commit()
except IntegrityError:
- print('[migration.shout] hidden by topic ' + tpc.slug)
+ print("[migration.shout] hidden by topic " + tpc.slug)
# main topic
maintopic = storage["replacements"].get(topics_by_oid.get(category, {}).get("slug"))
if maintopic in ttt:
@@ -261,7 +260,7 @@ async def process_user(userdata, storage, oid):
if not user:
try:
slug = userdata["slug"].lower().strip()
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
userdata["slug"] = slug
user = User.create(**userdata)
session.add(user)
@@ -289,9 +288,9 @@ async def resolve_create_shout(shout_dict):
s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
bump = False
if s:
- if s.createdAt != shout_dict['createdAt']:
+ if s.createdAt != shout_dict["createdAt"]:
# create new with different slug
- shout_dict["slug"] += '-' + shout_dict["layout"]
+ shout_dict["slug"] += "-" + shout_dict["layout"]
try:
await create_shout(shout_dict)
except IntegrityError as e:
diff --git a/migration/tables/remarks.py b/migration/tables/remarks.py
index 09957ed4..e133050f 100644
--- a/migration/tables/remarks.py
+++ b/migration/tables/remarks.py
@@ -5,24 +5,24 @@ from orm.reaction import Reaction, ReactionKind
def migrate(entry, storage):
- post_oid = entry['contentItem']
+ post_oid = entry["contentItem"]
print(post_oid)
- shout_dict = storage['shouts']['by_oid'].get(post_oid)
+ shout_dict = storage["shouts"]["by_oid"].get(post_oid)
if shout_dict:
- print(shout_dict['body'])
+ print(shout_dict["body"])
remark = {
- "shout": shout_dict['id'],
- "body": extract_md(html2text(entry['body']), shout_dict),
+ "shout": shout_dict["id"],
+ "body": extract_md(html2text(entry["body"]), shout_dict),
"kind": ReactionKind.REMARK,
}
- if entry.get('textBefore'):
- remark['range'] = (
- str(shout_dict['body'].index(entry['textBefore'] or ''))
- + ':'
+ if entry.get("textBefore"):
+ remark["range"] = (
+ str(shout_dict["body"].index(entry["textBefore"] or ""))
+ + ":"
+ str(
- shout_dict['body'].index(entry['textAfter'] or '')
- + len(entry['textAfter'] or '')
+ shout_dict["body"].index(entry["textAfter"] or "")
+ + len(entry["textAfter"] or "")
)
)
diff --git a/migration/tables/users.py b/migration/tables/users.py
index 46f2e825..40c80f21 100644
--- a/migration/tables/users.py
+++ b/migration/tables/users.py
@@ -1,11 +1,10 @@
-import re
-
+from base.orm import local_session
from bs4 import BeautifulSoup
from dateutil.parser import parse
+from orm.user import AuthorFollower, User, UserRating
from sqlalchemy.exc import IntegrityError
-from base.orm import local_session
-from orm.user import AuthorFollower, User, UserRating
+import re
def migrate(entry):
@@ -33,12 +32,12 @@ def migrate(entry):
if entry.get("profile"):
# slug
slug = entry["profile"].get("path").lower()
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug).strip()
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug).strip()
user_dict["slug"] = slug
bio = (
(entry.get("profile", {"bio": ""}).get("bio") or "")
- .replace('\(', '(')
- .replace('\)', ')')
+ .replace(r"\(", "(")
+ .replace(r"\)", ")")
)
bio_text = BeautifulSoup(bio, features="lxml").text
@@ -144,7 +143,7 @@ def migrate_2stage(entry, id_map):
}
user_rating = UserRating.create(**user_rating_dict)
- if user_rating_dict['value'] > 0:
+ if user_rating_dict["value"] > 0:
af = AuthorFollower.create(author=user.id, follower=rater.id, auto=True)
session.add(af)
session.add(user_rating)
diff --git a/orm/collection.py b/orm/collection.py
index c9975b62..1c432727 100644
--- a/orm/collection.py
+++ b/orm/collection.py
@@ -1,8 +1,6 @@
-from datetime import datetime
-
-from sqlalchemy import Column, DateTime, ForeignKey, String
-
from base.orm import Base
+from datetime import datetime
+from sqlalchemy import Column, DateTime, ForeignKey, String
class ShoutCollection(Base):
diff --git a/orm/community.py b/orm/community.py
index 7045e1aa..c31732a0 100644
--- a/orm/community.py
+++ b/orm/community.py
@@ -1,8 +1,6 @@
-from datetime import datetime
-
-from sqlalchemy import Column, DateTime, ForeignKey, String
-
from base.orm import Base, local_session
+from datetime import datetime
+from sqlalchemy import Column, DateTime, ForeignKey, String
class CommunityFollower(Base):
@@ -33,4 +31,4 @@ class Community(Base):
session.add(d)
session.commit()
Community.default_community = d
- print('[orm] default community id: %s' % d.id)
+ print("[orm] default community id: %s" % d.id)
diff --git a/orm/notification.py b/orm/notification.py
index a838ce6b..2fdc9d5d 100644
--- a/orm/notification.py
+++ b/orm/notification.py
@@ -1,11 +1,9 @@
+from base.orm import Base
from datetime import datetime
from enum import Enum as Enumeration
-
from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer
from sqlalchemy.dialects.postgresql import JSONB
-from base.orm import Base
-
class NotificationType(Enumeration):
NEW_COMMENT = 1
diff --git a/orm/rbac.py b/orm/rbac.py
index 80914949..bb7eb34b 100644
--- a/orm/rbac.py
+++ b/orm/rbac.py
@@ -1,9 +1,8 @@
-import warnings
-
+from base.orm import Base, local_session, REGISTRY
from sqlalchemy import Column, ForeignKey, String, TypeDecorator, UniqueConstraint
from sqlalchemy.orm import relationship
-from base.orm import REGISTRY, Base, engine, local_session
+import warnings
# Role Based Access Control #
@@ -165,14 +164,14 @@ class Permission(Base):
)
-if __name__ == "__main__":
- Base.metadata.create_all(engine)
- ops = [
- Permission(role=1, operation=1, resource=1),
- Permission(role=1, operation=2, resource=1),
- Permission(role=1, operation=3, resource=1),
- Permission(role=1, operation=4, resource=1),
- Permission(role=2, operation=4, resource=1),
- ]
- global_session.add_all(ops)
- global_session.commit()
+# if __name__ == "__main__":
+# Base.metadata.create_all(engine)
+# ops = [
+# Permission(role=1, operation=1, resource=1),
+# Permission(role=1, operation=2, resource=1),
+# Permission(role=1, operation=3, resource=1),
+# Permission(role=1, operation=4, resource=1),
+# Permission(role=2, operation=4, resource=1),
+# ]
+# global_session.add_all(ops)
+# global_session.commit()
diff --git a/orm/reaction.py b/orm/reaction.py
index f3680b6d..89fed9eb 100644
--- a/orm/reaction.py
+++ b/orm/reaction.py
@@ -1,10 +1,8 @@
+from base.orm import Base
from datetime import datetime
from enum import Enum as Enumeration
-
from sqlalchemy import Column, DateTime, Enum, ForeignKey, String
-from base.orm import Base
-
class ReactionKind(Enumeration):
AGREE = 1 # +1
diff --git a/orm/shout.py b/orm/shout.py
index 0d980b8a..7a77b66c 100644
--- a/orm/shout.py
+++ b/orm/shout.py
@@ -1,12 +1,10 @@
-from datetime import datetime
-
-from sqlalchemy import JSON, Boolean, Column, DateTime, ForeignKey, Integer, String
-from sqlalchemy.orm import column_property, relationship
-
from base.orm import Base, local_session
+from datetime import datetime
from orm.reaction import Reaction
from orm.topic import Topic
from orm.user import User
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, JSON, String
+from sqlalchemy.orm import column_property, relationship
class ShoutTopic(Base):
@@ -70,7 +68,7 @@ class Shout(Base):
# TODO: these field should be used or modified
community = Column(ForeignKey("community.id"), default=1)
- lang = Column(String, nullable=False, default='ru', comment="Language")
+ lang = Column(String, nullable=False, default="ru", comment="Language")
mainTopic = Column(ForeignKey("topic.slug"), nullable=True)
visibility = Column(String, nullable=True) # owner authors community public
versionOf = Column(ForeignKey("shout.id"), nullable=True)
@@ -81,7 +79,12 @@ class Shout(Base):
with local_session() as session:
s = session.query(Shout).first()
if not s:
- entry = {"slug": "genesis-block", "body": "", "title": "Ничего", "lang": "ru"}
+ entry = {
+ "slug": "genesis-block",
+ "body": "",
+ "title": "Ничего",
+ "lang": "ru",
+ }
s = Shout.create(**entry)
session.add(s)
session.commit()
diff --git a/orm/topic.py b/orm/topic.py
index b0d7cc01..6da93732 100644
--- a/orm/topic.py
+++ b/orm/topic.py
@@ -1,8 +1,6 @@
-from datetime import datetime
-
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String
-
from base.orm import Base
+from datetime import datetime
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String
class TopicFollower(Base):
diff --git a/orm/user.py b/orm/user.py
index d10be411..d76c4627 100644
--- a/orm/user.py
+++ b/orm/user.py
@@ -1,11 +1,10 @@
-from datetime import datetime
-
-from sqlalchemy import JSON as JSONType
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
-from sqlalchemy.orm import relationship
-
from base.orm import Base, local_session
+from datetime import datetime
from orm.rbac import Role
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer
+from sqlalchemy import JSON as JSONType
+from sqlalchemy import String
+from sqlalchemy.orm import relationship
class UserRating(Base):
diff --git a/requirements-dev.txt b/requirements-dev.txt
index b2e99a01..31fbe456 100755
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -3,3 +3,4 @@ brunette
flake8
mypy
pre-commit
+black
diff --git a/requirements.txt b/requirements.txt
index edbf46ff..a919e623 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -18,15 +18,12 @@ transliterate~=1.10.2
requests~=2.28.1
bcrypt>=4.0.0
bson~=0.5.10
-flake8
DateTime~=4.7
asyncio~=3.4.3
python-dateutil~=2.8.2
beautifulsoup4~=4.11.1
lxml
sentry-sdk>=1.14.0
-# sse_starlette
-graphql-ws
nltk~=3.8.1
pymystem3~=0.2.0
transformers~=4.28.1
diff --git a/resolvers/__init__.py b/resolvers/__init__.py
deleted file mode 100644
index 78ae7e22..00000000
--- a/resolvers/__init__.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from resolvers.auth import (
- auth_send_link,
- confirm_email,
- get_current_user,
- is_email_used,
- login,
- register_by_email,
- sign_out,
-)
-from resolvers.create.editor import create_shout, delete_shout, update_shout
-from resolvers.create.migrate import markdown_body
-from resolvers.inbox.chats import create_chat, delete_chat, update_chat
-from resolvers.inbox.load import load_chats, load_messages_by, load_recipients
-from resolvers.inbox.messages import create_message, delete_message, mark_as_read, update_message
-from resolvers.inbox.search import search_recipients
-from resolvers.notifications import load_notifications
-from resolvers.zine.following import follow, unfollow
-from resolvers.zine.load import load_shout, load_shouts_by
-from resolvers.zine.profile import get_authors_all, load_authors_by, rate_user, update_profile
-from resolvers.zine.reactions import (
- create_reaction,
- delete_reaction,
- load_reactions_by,
- reactions_follow,
- reactions_unfollow,
- update_reaction,
-)
-from resolvers.zine.topics import (
- get_topic,
- topic_follow,
- topic_unfollow,
- topics_all,
- topics_by_author,
- topics_by_community,
-)
diff --git a/resolvers/auth.py b/resolvers/auth.py
index c28898e3..3ba15d9d 100644
--- a/resolvers/auth.py
+++ b/resolvers/auth.py
@@ -1,13 +1,5 @@
# -*- coding: utf-8 -*-
-import re
-from datetime import datetime, timezone
-from urllib.parse import quote_plus
-
-from graphql.type import GraphQLResolveInfo
-from starlette.responses import RedirectResponse
-from transliterate import translit
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
@@ -23,8 +15,15 @@ from base.exceptions import (
)
from base.orm import local_session
from base.resolvers import mutation, query
+from datetime import datetime, timezone
+from graphql.type import GraphQLResolveInfo
from orm import Role, User
from settings import FRONTEND_URL, SESSION_TOKEN_HEADER
+from starlette.responses import RedirectResponse
+from transliterate import translit
+from urllib.parse import quote_plus
+
+import re
@mutation.field("getSession")
@@ -45,7 +44,7 @@ async def get_current_user(_, info):
async def confirm_email(_, info, token):
"""confirm owning email address"""
try:
- print('[resolvers.auth] confirm email by token')
+ print("[resolvers.auth] confirm email by token")
payload = JWTCodec.decode(token)
user_id = payload.user_id
await TokenStorage.get(f"{user_id}-{payload.username}-{token}")
@@ -68,9 +67,9 @@ async def confirm_email_handler(request):
token = request.path_params["token"] # one time
request.session["token"] = token
res = await confirm_email(None, {}, token)
- print('[resolvers.auth] confirm_email request: %r' % request)
+ print("[resolvers.auth] confirm_email request: %r" % request)
if "error" in res:
- raise BaseHttpException(res['error'])
+ raise BaseHttpException(res["error"])
else:
response = RedirectResponse(url=FRONTEND_URL)
response.set_cookie("token", res["token"]) # session token
@@ -87,22 +86,22 @@ def create_user(user_dict):
def generate_unique_slug(src):
- print('[resolvers.auth] generating slug from: ' + src)
+ print("[resolvers.auth] generating slug from: " + src)
slug = translit(src, "ru", reversed=True).replace(".", "-").lower()
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
if slug != src:
- print('[resolvers.auth] translited name: ' + slug)
+ print("[resolvers.auth] translited name: " + slug)
c = 1
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
while user:
user = session.query(User).where(User.slug == slug).first()
- slug = slug + '-' + str(c)
+ slug = slug + "-" + str(c)
c += 1
if not user:
unique_slug = slug
- print('[resolvers.auth] ' + unique_slug)
- return quote_plus(unique_slug.replace('\'', '')).replace('+', '-')
+ print("[resolvers.auth] " + unique_slug)
+ return quote_plus(unique_slug.replace("'", "")).replace("+", "-")
@mutation.field("registerUser")
@@ -117,7 +116,7 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str
slug = generate_unique_slug(name)
user = session.query(User).where(User.slug == slug).first()
if user:
- slug = generate_unique_slug(email.split('@')[0])
+ slug = generate_unique_slug(email.split("@")[0])
user_dict = {
"email": email,
"username": email, # will be used to store phone number or some messenger network id
diff --git a/resolvers/create/editor.py b/resolvers/create/editor.py
index d6db8bf6..6ec690f7 100644
--- a/resolvers/create/editor.py
+++ b/resolvers/create/editor.py
@@ -1,15 +1,13 @@
-from datetime import datetime, timezone
-
-from sqlalchemy import and_
-from sqlalchemy.orm import joinedload
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation
+from datetime import datetime, timezone
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
+from sqlalchemy import and_
+from sqlalchemy.orm import joinedload
@mutation.field("createShout")
@@ -18,15 +16,15 @@ async def create_shout(_, info, inp):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
- topics = session.query(Topic).filter(Topic.slug.in_(inp.get('topics', []))).all()
+ topics = session.query(Topic).filter(Topic.slug.in_(inp.get("topics", []))).all()
new_shout = Shout.create(
**{
"title": inp.get("title"),
- "subtitle": inp.get('subtitle'),
- "lead": inp.get('lead'),
- "description": inp.get('description'),
- "body": inp.get("body", ''),
+ "subtitle": inp.get("subtitle"),
+ "lead": inp.get("lead"),
+ "description": inp.get("description"),
+ "body": inp.get("body", ""),
"layout": inp.get("layout"),
"authors": inp.get("authors", []),
"slug": inp.get("slug"),
@@ -128,7 +126,10 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
]
shout_topics_to_remove = session.query(ShoutTopic).filter(
- and_(ShoutTopic.shout == shout.id, ShoutTopic.topic.in_(topic_to_unlink_ids))
+ and_(
+ ShoutTopic.shout == shout.id,
+ ShoutTopic.topic.in_(topic_to_unlink_ids),
+ )
)
for shout_topic_to_remove in shout_topics_to_remove:
@@ -136,13 +137,13 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
shout_input["mainTopic"] = shout_input["mainTopic"]["slug"]
- if shout_input["mainTopic"] == '':
+ if shout_input["mainTopic"] == "":
del shout_input["mainTopic"]
shout.update(shout_input)
updated = True
- if publish and shout.visibility == 'owner':
+ if publish and shout.visibility == "owner":
shout.visibility = "community"
shout.publishedAt = datetime.now(tz=timezone.utc)
updated = True
diff --git a/resolvers/create/migrate.py b/resolvers/create/migrate.py
index 9e849f86..028808b1 100644
--- a/resolvers/create/migrate.py
+++ b/resolvers/create/migrate.py
@@ -1,10 +1,10 @@
-from base.resolvers import query
-from migration.extract import extract_md
-from resolvers.auth import login_required
-
-
-@login_required
-@query.field("markdownBody")
-def markdown_body(_, info, body: str):
- body = extract_md(body)
- return body
+# from base.resolvers import query
+# from migration.extract import extract_md
+# from resolvers.auth import login_required
+#
+#
+# @login_required
+# @query.field("markdownBody")
+# def markdown_body(_, info, body: str):
+# body = extract_md(body)
+# return body
diff --git a/resolvers/inbox/chats.py b/resolvers/inbox/chats.py
index a589e870..95a31f69 100644
--- a/resolvers/inbox/chats.py
+++ b/resolvers/inbox/chats.py
@@ -1,13 +1,13 @@
-import json
-import uuid
-from datetime import datetime, timezone
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
+from datetime import datetime, timezone
from validations.inbox import Chat
+import json
+import uuid
+
@mutation.field("updateChat")
@login_required
@@ -49,7 +49,7 @@ async def update_chat(_, info, chat_new: Chat):
async def create_chat(_, info, title="", members=[]):
auth: AuthCredentials = info.context["request"].auth
chat = {}
- print('create_chat members: %r' % members)
+ print("create_chat members: %r" % members)
if auth.user_id not in members:
members.append(int(auth.user_id))
@@ -71,8 +71,8 @@ async def create_chat(_, info, title="", members=[]):
chat = await redis.execute("GET", f"chats/{c.decode('utf-8')}")
if chat:
chat = json.loads(chat)
- if chat['title'] == "":
- print('[inbox] createChat found old chat')
+ if chat["title"] == "":
+ print("[inbox] createChat found old chat")
print(chat)
break
if chat:
@@ -105,7 +105,7 @@ async def delete_chat(_, info, chat_id: str):
chat = await redis.execute("GET", f"/chats/{chat_id}")
if chat:
chat = dict(json.loads(chat))
- if auth.user_id in chat['admins']:
+ if auth.user_id in chat["admins"]:
await redis.execute("DEL", f"chats/{chat_id}")
await redis.execute("SREM", "chats_by_user/" + str(auth.user_id), chat_id)
await redis.execute("COMMIT")
diff --git a/resolvers/inbox/load.py b/resolvers/inbox/load.py
index 43f8a07c..54ae75d5 100644
--- a/resolvers/inbox/load.py
+++ b/resolvers/inbox/load.py
@@ -1,5 +1,4 @@
-import json
-
+from .unread import get_unread_counter
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
@@ -8,13 +7,13 @@ from base.resolvers import query
from orm.user import User
from resolvers.zine.profile import followed_authors
-from .unread import get_unread_counter
+import json
# from datetime import datetime, timedelta, timezone
async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
- '''load :limit messages for :chat_id with :offset'''
+ """load :limit messages for :chat_id with :offset"""
messages = []
message_ids = []
if ids:
@@ -29,10 +28,10 @@ async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
if message_ids:
message_keys = [f"chats/{chat_id}/messages/{mid}" for mid in message_ids]
messages = await redis.mget(*message_keys)
- messages = [json.loads(msg.decode('utf-8')) for msg in messages]
+ messages = [json.loads(msg.decode("utf-8")) for msg in messages]
replies = []
for m in messages:
- rt = m.get('replyTo')
+ rt = m.get("replyTo")
if rt:
rt = int(rt)
if rt not in message_ids:
@@ -52,7 +51,7 @@ async def load_chats(_, info, limit: int = 50, offset: int = 0):
if cids:
cids = list(cids)[offset : offset + limit]
if not cids:
- print('[inbox.load] no chats were found')
+ print("[inbox.load] no chats were found")
cids = []
onliners = await redis.execute("SMEMBERS", "users-online")
if not onliners:
@@ -63,14 +62,14 @@ async def load_chats(_, info, limit: int = 50, offset: int = 0):
c = await redis.execute("GET", "chats/" + cid)
if c:
c = dict(json.loads(c))
- c['messages'] = await load_messages(cid, 5, 0)
- c['unread'] = await get_unread_counter(cid, auth.user_id)
+ c["messages"] = await load_messages(cid, 5, 0)
+ c["unread"] = await get_unread_counter(cid, auth.user_id)
with local_session() as session:
- c['members'] = []
+ c["members"] = []
for uid in c["users"]:
a = session.query(User).where(User.id == uid).first()
if a:
- c['members'].append(
+ c["members"].append(
{
"id": a.id,
"slug": a.slug,
@@ -87,16 +86,16 @@ async def load_chats(_, info, limit: int = 50, offset: int = 0):
@query.field("loadMessagesBy")
@login_required
async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
- '''load :limit messages of :chat_id with :offset'''
+ """load :limit messages of :chat_id with :offset"""
auth: AuthCredentials = info.context["request"].auth
userchats = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
- userchats = [c.decode('utf-8') for c in userchats]
+ userchats = [c.decode("utf-8") for c in userchats]
# print('[inbox] userchats: %r' % userchats)
if userchats:
# print('[inbox] loading messages by...')
messages = []
- by_chat = by.get('chat')
+ by_chat = by.get("chat")
if by_chat in userchats:
chat = await redis.execute("GET", f"chats/{by_chat}")
# print(chat)
@@ -104,7 +103,10 @@ async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
return {"messages": [], "error": "chat not exist"}
# everyone's messages in filtered chat
messages = await load_messages(by_chat, limit, offset)
- return {"messages": sorted(list(messages), key=lambda m: m['createdAt']), "error": None}
+ return {
+ "messages": sorted(list(messages), key=lambda m: m["createdAt"]),
+ "error": None,
+ }
else:
return {"error": "Cannot access messages of this chat"}
diff --git a/resolvers/inbox/messages.py b/resolvers/inbox/messages.py
index 3d35105a..b3d2689f 100644
--- a/resolvers/inbox/messages.py
+++ b/resolvers/inbox/messages.py
@@ -1,16 +1,11 @@
-import asyncio
-import json
-from datetime import datetime, timezone
-from typing import Any
-
-from graphql.type import GraphQLResolveInfo
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
-from services.following import Following, FollowingManager, FollowingResult
-from validations.inbox import Message
+from datetime import datetime, timezone
+from services.following import FollowingManager, FollowingResult
+
+import json
@mutation.field("createMessage")
@@ -27,15 +22,15 @@ async def create_message(_, info, chat: str, body: str, replyTo=None):
message_id = await redis.execute("GET", f"chats/{chat['id']}/next_message_id")
message_id = int(message_id)
new_message = {
- "chatId": chat['id'],
+ "chatId": chat["id"],
"id": message_id,
"author": auth.user_id,
"body": body,
"createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
}
if replyTo:
- new_message['replyTo'] = replyTo
- chat['updatedAt'] = new_message['createdAt']
+ new_message["replyTo"] = replyTo
+ chat["updatedAt"] = new_message["createdAt"]
await redis.execute("SET", f"chats/{chat['id']}", json.dumps(chat))
print(f"[inbox] creating message {new_message}")
await redis.execute(
@@ -48,8 +43,8 @@ async def create_message(_, info, chat: str, body: str, replyTo=None):
for user_slug in users:
await redis.execute("LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id))
- result = FollowingResult("NEW", 'chat', new_message)
- await FollowingManager.push('chat', result)
+ result = FollowingResult("NEW", "chat", new_message)
+ await FollowingManager.push("chat", result)
return {"message": new_message, "error": None}
@@ -76,8 +71,8 @@ async def update_message(_, info, chat_id: str, message_id: int, body: str):
await redis.execute("SET", f"chats/{chat_id}/messages/{message_id}", json.dumps(message))
- result = FollowingResult("UPDATED", 'chat', message)
- await FollowingManager.push('chat', result)
+ result = FollowingResult("UPDATED", "chat", message)
+ await FollowingManager.push("chat", result)
return {"message": message, "error": None}
@@ -106,7 +101,7 @@ async def delete_message(_, info, chat_id: str, message_id: int):
for user_id in users:
await redis.execute("LREM", f"chats/{chat_id}/unread/{user_id}", 0, str(message_id))
- result = FollowingResult("DELETED", 'chat', message)
+ result = FollowingResult("DELETED", "chat", message)
await FollowingManager.push(result)
return {}
diff --git a/resolvers/inbox/search.py b/resolvers/inbox/search.py
index 8a3f0c2d..510ce52c 100644
--- a/resolvers/inbox/search.py
+++ b/resolvers/inbox/search.py
@@ -1,14 +1,14 @@
-import json
-from datetime import datetime, timedelta, timezone
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.redis import redis
from base.resolvers import query
+from datetime import datetime, timedelta, timezone
from orm.user import AuthorFollower, User
from resolvers.inbox.load import load_messages
+import json
+
@query.field("searchRecipients")
@login_required
@@ -59,22 +59,22 @@ async def search_user_chats(by, messages, user_id: int, limit, offset):
cids.union(set(await redis.execute("SMEMBERS", "chats_by_user/" + str(user_id))))
messages = []
- by_author = by.get('author')
+ by_author = by.get("author")
if by_author:
# all author's messages
cids.union(set(await redis.execute("SMEMBERS", f"chats_by_user/{by_author}")))
# author's messages in filtered chat
messages.union(set(filter(lambda m: m["author"] == by_author, list(messages))))
for c in cids:
- c = c.decode('utf-8')
+ c = c.decode("utf-8")
messages = await load_messages(c, limit, offset)
- body_like = by.get('body')
+ body_like = by.get("body")
if body_like:
# search in all messages in all user's chats
for c in cids:
# FIXME: use redis scan here
- c = c.decode('utf-8')
+ c = c.decode("utf-8")
mmm = await load_messages(c, limit, offset)
for m in mmm:
if body_like in m["body"]:
diff --git a/resolvers/notifications.py b/resolvers/notifications.py
index 98314d2e..3ece629e 100644
--- a/resolvers/notifications.py
+++ b/resolvers/notifications.py
@@ -1,10 +1,9 @@
-from sqlalchemy import and_, desc, select, update
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Notification
+from sqlalchemy import and_, desc, select, update
@query.field("loadNotifications")
@@ -16,8 +15,8 @@ async def load_notifications(_, info, params=None):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- limit = params.get('limit', 50)
- offset = params.get('offset', 0)
+ limit = params.get("limit", 50)
+ offset = params.get("offset", 0)
q = (
select(Notification)
@@ -33,7 +32,7 @@ async def load_notifications(_, info, params=None):
total_unread_count = (
session.query(Notification)
- .where(and_(Notification.user == user_id, Notification.seen == False))
+ .where(and_(Notification.user == user_id, Notification.seen == False)) # noqa: E712
.count()
)
@@ -74,7 +73,7 @@ async def mark_all_notifications_as_read(_, info):
statement = (
update(Notification)
- .where(and_(Notification.user == user_id, Notification.seen == False))
+ .where(and_(Notification.user == user_id, Notification.seen == False)) # noqa: E712
.values(seen=True)
)
diff --git a/resolvers/upload.py b/resolvers/upload.py
index 3eee3358..9649222c 100644
--- a/resolvers/upload.py
+++ b/resolvers/upload.py
@@ -1,33 +1,33 @@
+from botocore.exceptions import BotoCoreError, ClientError
+from starlette.responses import JSONResponse
+
+import boto3
import os
import shutil
import tempfile
import uuid
-import boto3
-from botocore.exceptions import BotoCoreError, ClientError
-from starlette.responses import JSONResponse
-
-STORJ_ACCESS_KEY = os.environ.get('STORJ_ACCESS_KEY')
-STORJ_SECRET_KEY = os.environ.get('STORJ_SECRET_KEY')
-STORJ_END_POINT = os.environ.get('STORJ_END_POINT')
-STORJ_BUCKET_NAME = os.environ.get('STORJ_BUCKET_NAME')
-CDN_DOMAIN = os.environ.get('CDN_DOMAIN')
+STORJ_ACCESS_KEY = os.environ.get("STORJ_ACCESS_KEY")
+STORJ_SECRET_KEY = os.environ.get("STORJ_SECRET_KEY")
+STORJ_END_POINT = os.environ.get("STORJ_END_POINT")
+STORJ_BUCKET_NAME = os.environ.get("STORJ_BUCKET_NAME")
+CDN_DOMAIN = os.environ.get("CDN_DOMAIN")
async def upload_handler(request):
form = await request.form()
- file = form.get('file')
+ file = form.get("file")
if file is None:
- return JSONResponse({'error': 'No file uploaded'}, status_code=400)
+ return JSONResponse({"error": "No file uploaded"}, status_code=400)
file_name, file_extension = os.path.splitext(file.filename)
- key = 'files/' + str(uuid.uuid4()) + file_extension
+ key = "files/" + str(uuid.uuid4()) + file_extension
# Create an S3 client with Storj configuration
s3 = boto3.client(
- 's3',
+ "s3",
aws_access_key_id=STORJ_ACCESS_KEY,
aws_secret_access_key=STORJ_SECRET_KEY,
endpoint_url=STORJ_END_POINT,
@@ -45,10 +45,10 @@ async def upload_handler(request):
ExtraArgs={"ContentType": file.content_type},
)
- url = 'https://' + CDN_DOMAIN + '/' + key
+ url = "https://" + CDN_DOMAIN + "/" + key
- return JSONResponse({'url': url, 'originalFilename': file.filename})
+ return JSONResponse({"url": url, "originalFilename": file.filename})
except (BotoCoreError, ClientError) as e:
print(e)
- return JSONResponse({'error': 'Failed to upload file'}, status_code=500)
+ return JSONResponse({"error": "Failed to upload file"}, status_code=500)
diff --git a/resolvers/zine/following.py b/resolvers/zine/following.py
index 24935d5e..bc92371a 100644
--- a/resolvers/zine/following.py
+++ b/resolvers/zine/following.py
@@ -1,20 +1,12 @@
-import asyncio
-
-from graphql.type import GraphQLResolveInfo
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.orm import local_session
from base.resolvers import mutation
-from orm.shout import ShoutReactionsFollower
-from orm.topic import TopicFollower
# from resolvers.community import community_follow, community_unfollow
-from orm.user import AuthorFollower
from resolvers.zine.profile import author_follow, author_unfollow
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
from resolvers.zine.topics import topic_follow, topic_unfollow
-from services.following import Following, FollowingManager, FollowingResult
+from services.following import FollowingManager, FollowingResult
@mutation.field("follow")
@@ -25,20 +17,20 @@ async def follow(_, info, what, slug):
try:
if what == "AUTHOR":
if author_follow(auth.user_id, slug):
- result = FollowingResult("NEW", 'author', slug)
- await FollowingManager.push('author', result)
+ result = FollowingResult("NEW", "author", slug)
+ await FollowingManager.push("author", result)
elif what == "TOPIC":
if topic_follow(auth.user_id, slug):
- result = FollowingResult("NEW", 'topic', slug)
- await FollowingManager.push('topic', result)
+ result = FollowingResult("NEW", "topic", slug)
+ await FollowingManager.push("topic", result)
elif what == "COMMUNITY":
if False: # TODO: use community_follow(auth.user_id, slug):
- result = FollowingResult("NEW", 'community', slug)
- await FollowingManager.push('community', result)
+ result = FollowingResult("NEW", "community", slug)
+ await FollowingManager.push("community", result)
elif what == "REACTIONS":
if reactions_follow(auth.user_id, slug):
- result = FollowingResult("NEW", 'shout', slug)
- await FollowingManager.push('shout', result)
+ result = FollowingResult("NEW", "shout", slug)
+ await FollowingManager.push("shout", result)
except Exception as e:
print(Exception(e))
return {"error": str(e)}
@@ -54,20 +46,20 @@ async def unfollow(_, info, what, slug):
try:
if what == "AUTHOR":
if author_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", 'author', slug)
- await FollowingManager.push('author', result)
+ result = FollowingResult("DELETED", "author", slug)
+ await FollowingManager.push("author", result)
elif what == "TOPIC":
if topic_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", 'topic', slug)
- await FollowingManager.push('topic', result)
+ result = FollowingResult("DELETED", "topic", slug)
+ await FollowingManager.push("topic", result)
elif what == "COMMUNITY":
if False: # TODO: use community_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", 'community', slug)
- await FollowingManager.push('community', result)
+ result = FollowingResult("DELETED", "community", slug)
+ await FollowingManager.push("community", result)
elif what == "REACTIONS":
if reactions_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", 'shout', slug)
- await FollowingManager.push('shout', result)
+ result = FollowingResult("DELETED", "shout", slug)
+ await FollowingManager.push("shout", result)
except Exception as e:
return {"error": str(e)}
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 06f400fc..90d790ac 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -1,26 +1,24 @@
-from datetime import datetime, timedelta, timezone
-
-from sqlalchemy.orm import aliased, joinedload
-from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select, text
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.exceptions import ObjectNotExist, OperationNotAllowed
+from base.exceptions import ObjectNotExist
from base.orm import local_session
from base.resolvers import query
+from datetime import datetime, timedelta, timezone
from orm import TopicFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.user import AuthorFollower
+from sqlalchemy.orm import aliased, joinedload
+from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select
def add_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction).add_columns(
- func.sum(aliased_reaction.id).label('reacted_stat'),
+ func.sum(aliased_reaction.id).label("reacted_stat"),
func.sum(case((aliased_reaction.kind == ReactionKind.COMMENT, 1), else_=0)).label(
- 'commented_stat'
+ "commented_stat"
),
func.sum(
case(
@@ -36,13 +34,13 @@ def add_stat_columns(q):
(aliased_reaction.kind == ReactionKind.DISLIKE, -1),
else_=0,
)
- ).label('rating_stat'),
+ ).label("rating_stat"),
func.max(
case(
(aliased_reaction.kind != ReactionKind.COMMENT, None),
else_=aliased_reaction.createdAt,
)
- ).label('last_comment'),
+ ).label("last_comment"),
)
return q
@@ -60,7 +58,7 @@ def apply_filters(q, filters, user_id=None):
if filters.get("layout"):
q = q.filter(Shout.layout == filters.get("layout"))
- if filters.get('excludeLayout'):
+ if filters.get("excludeLayout"):
q = q.filter(Shout.layout != filters.get("excludeLayout"))
if filters.get("author"):
q = q.filter(Shout.authors.any(slug=filters.get("author")))
@@ -95,9 +93,13 @@ async def load_shout(_, info, slug=None, shout_id=None):
q = q.filter(Shout.deletedAt.is_(None)).group_by(Shout.id)
try:
- [shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute(
- q
- ).first()
+ [
+ shout,
+ reacted_stat,
+ commented_stat,
+ rating_stat,
+ last_comment,
+ ] = session.execute(q).first()
shout.stat = {
"viewed": shout.views,
@@ -154,7 +156,7 @@ async def load_shouts_by(_, info, options):
order_by = options.get("order_by", Shout.publishedAt)
- query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
+ query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
@@ -164,9 +166,13 @@ async def load_shouts_by(_, info, options):
with local_session() as session:
shouts_map = {}
- for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
- q
- ).unique():
+ for [
+ shout,
+ reacted_stat,
+ commented_stat,
+ rating_stat,
+ last_comment,
+ ] in session.execute(q).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
@@ -225,7 +231,11 @@ async def get_my_feed(_, info, options):
joinedload(Shout.topics),
)
.where(
- and_(Shout.publishedAt.is_not(None), Shout.deletedAt.is_(None), Shout.id.in_(subquery))
+ and_(
+ Shout.publishedAt.is_not(None),
+ Shout.deletedAt.is_(None),
+ Shout.id.in_(subquery),
+ )
)
)
@@ -234,7 +244,7 @@ async def get_my_feed(_, info, options):
order_by = options.get("order_by", Shout.publishedAt)
- query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
+ query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
@@ -243,9 +253,13 @@ async def get_my_feed(_, info, options):
shouts = []
with local_session() as session:
shouts_map = {}
- for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
- q
- ).unique():
+ for [
+ shout,
+ reacted_stat,
+ commented_stat,
+ rating_stat,
+ last_comment,
+ ] in session.execute(q).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
diff --git a/resolvers/zine/profile.py b/resolvers/zine/profile.py
index ecdc26c7..7275226d 100644
--- a/resolvers/zine/profile.py
+++ b/resolvers/zine/profile.py
@@ -1,18 +1,16 @@
-from datetime import datetime, timedelta, timezone
-from typing import List
-
-from sqlalchemy import and_, distinct, func, literal, select
-from sqlalchemy.orm import aliased, joinedload
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation, query
+from datetime import datetime, timedelta, timezone
from orm.reaction import Reaction, ReactionKind
from orm.shout import ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from orm.user import AuthorFollower, Role, User, UserRating, UserRole
from resolvers.zine.topics import followed_by_user
+from sqlalchemy import and_, distinct, func, literal, select
+from sqlalchemy.orm import aliased, joinedload
+from typing import List
def add_author_stat_columns(q):
@@ -22,24 +20,24 @@ def add_author_stat_columns(q):
# user_rating_aliased = aliased(UserRating)
q = q.outerjoin(shout_author_aliased).add_columns(
- func.count(distinct(shout_author_aliased.shout)).label('shouts_stat')
+ func.count(distinct(shout_author_aliased.shout)).label("shouts_stat")
)
q = q.outerjoin(author_followers, author_followers.author == User.id).add_columns(
- func.count(distinct(author_followers.follower)).label('followers_stat')
+ func.count(distinct(author_followers.follower)).label("followers_stat")
)
q = q.outerjoin(author_following, author_following.follower == User.id).add_columns(
- func.count(distinct(author_following.author)).label('followings_stat')
+ func.count(distinct(author_following.author)).label("followings_stat")
)
- q = q.add_columns(literal(0).label('rating_stat'))
+ q = q.add_columns(literal(0).label("rating_stat"))
# FIXME
# q = q.outerjoin(user_rating_aliased, user_rating_aliased.user == User.id).add_columns(
# # TODO: check
# func.sum(user_rating_aliased.value).label('rating_stat')
# )
- q = q.add_columns(literal(0).label('commented_stat'))
+ q = q.add_columns(literal(0).label("commented_stat"))
# q = q.outerjoin(Reaction, and_(Reaction.createdBy == User.id, Reaction.body.is_not(None))).add_columns(
# func.count(distinct(Reaction.id)).label('commented_stat')
# )
@@ -50,7 +48,13 @@ def add_author_stat_columns(q):
def add_stat(author, stat_columns):
- [shouts_stat, followers_stat, followings_stat, rating_stat, commented_stat] = stat_columns
+ [
+ shouts_stat,
+ followers_stat,
+ followings_stat,
+ rating_stat,
+ commented_stat,
+ ] = stat_columns
author.stat = {
"shouts": shouts_stat,
"followers": followers_stat,
@@ -227,7 +231,12 @@ async def get_author(_, _info, slug):
with local_session() as session:
comments_count = (
session.query(Reaction)
- .where(and_(Reaction.createdBy == author.id, Reaction.kind == ReactionKind.COMMENT))
+ .where(
+ and_(
+ Reaction.createdBy == author.id,
+ Reaction.kind == ReactionKind.COMMENT,
+ )
+ )
.count()
)
author.stat["commented"] = comments_count
diff --git a/resolvers/zine/reactions.py b/resolvers/zine/reactions.py
index 0a37f6c3..680cac52 100644
--- a/resolvers/zine/reactions.py
+++ b/resolvers/zine/reactions.py
@@ -1,25 +1,23 @@
-from datetime import datetime, timedelta, timezone
-
-from sqlalchemy import and_, asc, case, desc, func, select, text
-from sqlalchemy.orm import aliased
-
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.exceptions import OperationNotAllowed
from base.orm import local_session
from base.resolvers import mutation, query
+from datetime import datetime, timedelta, timezone
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower
from orm.user import User
from services.notifications.notification_service import notification_service
+from sqlalchemy import and_, asc, case, desc, func, select, text
+from sqlalchemy.orm import aliased
def add_reaction_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction, Reaction.id == aliased_reaction.replyTo).add_columns(
- func.sum(aliased_reaction.id).label('reacted_stat'),
- func.sum(case((aliased_reaction.body.is_not(None), 1), else_=0)).label('commented_stat'),
+ func.sum(aliased_reaction.id).label("reacted_stat"),
+ func.sum(case((aliased_reaction.body.is_not(None), 1), else_=0)).label("commented_stat"),
func.sum(
case(
(aliased_reaction.kind == ReactionKind.AGREE, 1),
@@ -32,7 +30,7 @@ def add_reaction_stat_columns(q):
(aliased_reaction.kind == ReactionKind.DISLIKE, -1),
else_=0,
)
- ).label('rating_stat'),
+ ).label("rating_stat"),
)
return q
@@ -91,7 +89,7 @@ def reactions_unfollow(user_id: int, shout_id: int):
def is_published_author(session, user_id):
- '''checks if user has at least one publication'''
+ """checks if user has at least one publication"""
return (
session.query(Shout)
.where(Shout.authors.contains(user_id))
@@ -102,7 +100,7 @@ def is_published_author(session, user_id):
def check_to_publish(session, user_id, reaction):
- '''set shout to public if publicated approvers amount > 4'''
+ """set shout to public if publicated approvers amount > 4"""
if not reaction.replyTo and reaction.kind in [
ReactionKind.ACCEPT,
ReactionKind.LIKE,
@@ -126,7 +124,7 @@ def check_to_publish(session, user_id, reaction):
def check_to_hide(session, user_id, reaction):
- '''hides any shout if 20% of reactions are negative'''
+ """hides any shout if 20% of reactions are negative"""
if not reaction.replyTo and reaction.kind in [
ReactionKind.REJECT,
ReactionKind.DISLIKE,
@@ -136,7 +134,11 @@ def check_to_hide(session, user_id, reaction):
approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
rejects = 0
for r in approvers_reactions:
- if r.kind in [ReactionKind.REJECT, ReactionKind.DISLIKE, ReactionKind.DISPROOF]:
+ if r.kind in [
+ ReactionKind.REJECT,
+ ReactionKind.DISLIKE,
+ ReactionKind.DISPROOF,
+ ]:
rejects += 1
if len(approvers_reactions) / rejects < 5:
return True
@@ -146,14 +148,14 @@ def check_to_hide(session, user_id, reaction):
def set_published(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
s.publishedAt = datetime.now(tz=timezone.utc)
- s.visibility = text('public')
+ s.visibility = text("public")
session.add(s)
session.commit()
def set_hidden(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
- s.visibility = text('community')
+ s.visibility = text("community")
session.add(s)
session.commit()
@@ -162,7 +164,7 @@ def set_hidden(session, shout_id):
@login_required
async def create_reaction(_, info, reaction):
auth: AuthCredentials = info.context["request"].auth
- reaction['createdBy'] = auth.user_id
+ reaction["createdBy"] = auth.user_id
rdict = {}
with local_session() as session:
shout = session.query(Shout).where(Shout.id == reaction["shout"]).one()
@@ -230,8 +232,8 @@ async def create_reaction(_, info, reaction):
await notification_service.handle_new_reaction(r.id)
rdict = r.dict()
- rdict['shout'] = shout.dict()
- rdict['createdBy'] = author.dict()
+ rdict["shout"] = shout.dict()
+ rdict["createdBy"] = author.dict()
# self-regulation mechanics
if check_to_hide(session, auth.user_id, r):
@@ -244,7 +246,7 @@ async def create_reaction(_, info, reaction):
except Exception as e:
print(f"[resolvers.reactions] error on reactions autofollowing: {e}")
- rdict['stat'] = {"commented": 0, "reacted": 0, "rating": 0}
+ rdict["stat"] = {"commented": 0, "reacted": 0, "rating": 0}
return {"reaction": rdict}
@@ -274,7 +276,11 @@ async def update_reaction(_, info, id, reaction={}):
if reaction.get("range"):
r.range = reaction.get("range")
session.commit()
- r.stat = {"commented": commented_stat, "reacted": reacted_stat, "rating": rating_stat}
+ r.stat = {
+ "commented": commented_stat,
+ "reacted": reacted_stat,
+ "rating": rating_stat,
+ }
return {"reaction": r}
@@ -338,7 +344,7 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
if by.get("comment"):
q = q.filter(func.length(Reaction.body) > 0)
- if len(by.get('search', '')) > 2:
+ if len(by.get("search", "")) > 2:
q = q.filter(Reaction.body.ilike(f'%{by["body"]}%'))
if by.get("days"):
@@ -346,7 +352,7 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
q = q.filter(Reaction.createdAt > after)
order_way = asc if by.get("sort", "").startswith("-") else desc
- order_field = by.get("sort", "").replace('-', '') or Reaction.createdAt
+ order_field = by.get("sort", "").replace("-", "") or Reaction.createdAt
q = q.group_by(Reaction.id, User.id, Shout.id).order_by(order_way(order_field))
@@ -357,9 +363,14 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
reactions = []
with local_session() as session:
- for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute(
- q
- ):
+ for [
+ reaction,
+ user,
+ shout,
+ reacted_stat,
+ commented_stat,
+ rating_stat,
+ ] in session.execute(q):
reaction.createdBy = user
reaction.shout = shout
reaction.stat = {
diff --git a/resolvers/zine/topics.py b/resolvers/zine/topics.py
index 72ecf9ac..f24065cd 100644
--- a/resolvers/zine/topics.py
+++ b/resolvers/zine/topics.py
@@ -1,12 +1,11 @@
-from sqlalchemy import and_, distinct, func, select
-from sqlalchemy.orm import aliased
-
from auth.authenticate import login_required
from base.orm import local_session
from base.resolvers import mutation, query
from orm import User
from orm.shout import ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
+from sqlalchemy import and_, distinct, func, select
+from sqlalchemy.orm import aliased
def add_topic_stat_columns(q):
@@ -15,11 +14,11 @@ def add_topic_stat_columns(q):
q = (
q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic)
- .add_columns(func.count(distinct(ShoutTopic.shout)).label('shouts_stat'))
+ .add_columns(func.count(distinct(ShoutTopic.shout)).label("shouts_stat"))
.outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout)
- .add_columns(func.count(distinct(aliased_shout_author.user)).label('authors_stat'))
+ .add_columns(func.count(distinct(aliased_shout_author.user)).label("authors_stat"))
.outerjoin(aliased_topic_follower)
- .add_columns(func.count(distinct(aliased_topic_follower.follower)).label('followers_stat'))
+ .add_columns(func.count(distinct(aliased_topic_follower.follower)).label("followers_stat"))
)
q = q.group_by(Topic.id)
@@ -29,7 +28,11 @@ def add_topic_stat_columns(q):
def add_stat(topic, stat_columns):
[shouts_stat, authors_stat, followers_stat] = stat_columns
- topic.stat = {"shouts": shouts_stat, "authors": authors_stat, "followers": followers_stat}
+ topic.stat = {
+ "shouts": shouts_stat,
+ "authors": authors_stat,
+ "followers": followers_stat,
+ }
return topic
diff --git a/server.py b/server.py
index 48186da0..a491c30d 100644
--- a/server.py
+++ b/server.py
@@ -1,45 +1,44 @@
+from settings import DEV_SERVER_PID_FILE_NAME, PORT
+
import os
import sys
-
import uvicorn
-from settings import DEV_SERVER_PID_FILE_NAME, PORT
-
def exception_handler(exception_type, exception, traceback, debug_hook=sys.excepthook):
print("%s: %s" % (exception_type.__name__, exception))
log_settings = {
- 'version': 1,
- 'disable_existing_loggers': True,
- 'formatters': {
- 'default': {
- '()': 'uvicorn.logging.DefaultFormatter',
- 'fmt': '%(levelprefix)s %(message)s',
- 'use_colors': None,
+ "version": 1,
+ "disable_existing_loggers": True,
+ "formatters": {
+ "default": {
+ "()": "uvicorn.logging.DefaultFormatter",
+ "fmt": "%(levelprefix)s %(message)s",
+ "use_colors": None,
},
- 'access': {
- '()': 'uvicorn.logging.AccessFormatter',
- 'fmt': '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
+ "access": {
+ "()": "uvicorn.logging.AccessFormatter",
+ "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
},
},
- 'handlers': {
- 'default': {
- 'formatter': 'default',
- 'class': 'logging.StreamHandler',
- 'stream': 'ext://sys.stderr',
+ "handlers": {
+ "default": {
+ "formatter": "default",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stderr",
},
- 'access': {
- 'formatter': 'access',
- 'class': 'logging.StreamHandler',
- 'stream': 'ext://sys.stdout',
+ "access": {
+ "formatter": "access",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stdout",
},
},
- 'loggers': {
- 'uvicorn': {'handlers': ['default'], 'level': 'INFO'},
- 'uvicorn.error': {'level': 'INFO', 'handlers': ['default'], 'propagate': True},
- 'uvicorn.access': {'handlers': ['access'], 'level': 'INFO', 'propagate': False},
+ "loggers": {
+ "uvicorn": {"handlers": ["default"], "level": "INFO"},
+ "uvicorn.error": {"level": "INFO", "handlers": ["default"], "propagate": True},
+ "uvicorn.access": {"handlers": ["access"], "level": "INFO", "propagate": False},
},
}
@@ -48,7 +47,8 @@ local_headers = [
("Access-Control-Allow-Origin", "https://localhost:3000"),
(
"Access-Control-Allow-Headers",
- "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization",
+ "DNT,User-Agent,X-Requested-With,If-Modified-Since,"
+ + "Cache-Control,Content-Type,Range,Authorization",
),
("Access-Control-Expose-Headers", "Content-Length,Content-Range"),
("Access-Control-Allow-Credentials", "true"),
@@ -92,4 +92,10 @@ if __name__ == "__main__":
json_tables()
else:
sys.excepthook = exception_handler
- uvicorn.run("main:app", host="0.0.0.0", port=PORT, proxy_headers=True, server_header=True)
+ uvicorn.run(
+ "main:app",
+ host="0.0.0.0",
+ port=PORT,
+ proxy_headers=True,
+ server_header=True,
+ )
diff --git a/services/following.py b/services/following.py
index 8261d696..a2be6af4 100644
--- a/services/following.py
+++ b/services/following.py
@@ -18,7 +18,7 @@ class Following:
class FollowingManager:
lock = asyncio.Lock()
- data = {'author': [], 'topic': [], 'shout': [], 'chat': []}
+ data = {"author": [], "topic": [], "shout": [], "chat": []}
@staticmethod
async def register(kind, uid):
@@ -34,13 +34,13 @@ class FollowingManager:
async def push(kind, payload):
try:
async with FollowingManager.lock:
- if kind == 'chat':
- for chat in FollowingManager['chat']:
+ if kind == "chat":
+ for chat in FollowingManager["chat"]:
if payload.message["chatId"] == chat.uid:
chat.queue.put_nowait(payload)
else:
for entity in FollowingManager[kind]:
- if payload.shout['createdBy'] == entity.uid:
+ if payload.shout["createdBy"] == entity.uid:
entity.queue.put_nowait(payload)
except Exception as e:
print(Exception(e))
diff --git a/services/main.py b/services/main.py
index 98fddcc1..6397a5e5 100644
--- a/services/main.py
+++ b/services/main.py
@@ -5,9 +5,9 @@ from services.stat.viewed import ViewedStorage
async def storages_init():
with local_session() as session:
- print('[main] initialize SearchService')
+ print("[main] initialize SearchService")
await SearchService.init(session)
- print('[main] SearchService initialized')
- print('[main] initialize storages')
+ print("[main] SearchService initialized")
+ print("[main] initialize storages")
await ViewedStorage.init()
- print('[main] storages initialized')
+ print("[main] storages initialized")
diff --git a/services/notifications/notification_service.py b/services/notifications/notification_service.py
index 8467e836..ade98763 100644
--- a/services/notifications/notification_service.py
+++ b/services/notifications/notification_service.py
@@ -1,14 +1,13 @@
-import asyncio
-import json
-from datetime import datetime, timezone
-
-from sqlalchemy import and_
-
from base.orm import local_session
+from datetime import datetime, timezone
from orm import Notification, Reaction, Shout, User
from orm.notification import NotificationType
from orm.reaction import ReactionKind
from services.notifications.sse import connection_manager
+from sqlalchemy import and_
+
+import asyncio
+import json
def shout_to_shout_data(shout):
@@ -16,13 +15,18 @@ def shout_to_shout_data(shout):
def user_to_user_data(user):
- return {"id": user.id, "name": user.name, "slug": user.slug, "userpic": user.userpic}
+ return {
+ "id": user.id,
+ "name": user.name,
+ "slug": user.slug,
+ "userpic": user.userpic,
+ }
def update_prev_notification(notification, user, reaction):
notification_data = json.loads(notification.data)
- notification_data["users"] = [u for u in notification_data["users"] if u['id'] != user.id]
+ notification_data["users"] = [u for u in notification_data["users"] if u["id"] != user.id]
notification_data["users"].append(user_to_user_data(user))
if notification_data["reactionIds"] is None:
@@ -61,7 +65,7 @@ class NewReactionNotificator:
Notification.type == NotificationType.NEW_REPLY,
Notification.shout == shout.id,
Notification.reaction == parent_reaction.id,
- Notification.seen == False,
+ Notification.seen == False, # noqa: E712
)
)
.first()
@@ -103,7 +107,7 @@ class NewReactionNotificator:
Notification.user == shout.createdBy,
Notification.type == NotificationType.NEW_COMMENT,
Notification.shout == shout.id,
- Notification.seen == False,
+ Notification.seen == False, # noqa: E712
)
)
.first()
@@ -154,7 +158,7 @@ class NotificationService:
try:
await notificator.run()
except Exception as e:
- print(f'[NotificationService.worker] error: {str(e)}')
+ print(f"[NotificationService.worker] error: {str(e)}")
notification_service = NotificationService()
diff --git a/services/notifications/sse.py b/services/notifications/sse.py
index 55cae575..23352344 100644
--- a/services/notifications/sse.py
+++ b/services/notifications/sse.py
@@ -1,9 +1,9 @@
-import asyncio
-import json
-
from sse_starlette.sse import EventSourceResponse
from starlette.requests import Request
+import asyncio
+import json
+
class ConnectionManager:
def __init__(self):
diff --git a/services/search.py b/services/search.py
index d1748cdd..ffcd32b5 100644
--- a/services/search.py
+++ b/services/search.py
@@ -1,10 +1,10 @@
-import asyncio
-import json
-
from base.redis import redis
from orm.shout import Shout
from resolvers.zine.load import load_shouts_by
+import asyncio
+import json
+
class SearchService:
lock = asyncio.Lock()
@@ -13,7 +13,7 @@ class SearchService:
@staticmethod
async def init(session):
async with SearchService.lock:
- print('[search.service] did nothing')
+ print("[search.service] did nothing")
SearchService.cache = {}
@staticmethod
@@ -21,7 +21,12 @@ class SearchService:
cached = await redis.execute("GET", text)
if not cached:
async with SearchService.lock:
- options = {"title": text, "body": text, "limit": limit, "offset": offset}
+ options = {
+ "title": text,
+ "body": text,
+ "limit": limit,
+ "offset": offset,
+ }
payload = await load_shouts_by(None, None, options)
await redis.execute("SET", text, json.dumps(payload))
return payload
diff --git a/services/stat/viewed.py b/services/stat/viewed.py
index c9f9a6db..ce5070b2 100644
--- a/services/stat/viewed.py
+++ b/services/stat/viewed.py
@@ -1,16 +1,14 @@
-import asyncio
-import time
+from base.orm import local_session
from datetime import datetime, timedelta, timezone
+from gql import Client, gql
+from gql.transport.aiohttp import AIOHTTPTransport
+from orm import Topic
+from orm.shout import Shout, ShoutTopic
from os import environ, path
from ssl import create_default_context
-from gql import Client, gql
-from gql.transport.aiohttp import AIOHTTPTransport
-from sqlalchemy import func
-
-from base.orm import local_session
-from orm import Topic, User
-from orm.shout import Shout, ShoutTopic
+import asyncio
+import time
load_facts = gql(
"""
@@ -46,7 +44,7 @@ query getDomains {
}
"""
)
-schema_str = open(path.dirname(__file__) + '/ackee.graphql').read()
+schema_str = open(path.dirname(__file__) + "/ackee.graphql").read()
token = environ.get("ACKEE_TOKEN", "")
@@ -54,7 +52,9 @@ def create_client(headers=None, schema=None):
return Client(
schema=schema,
transport=AIOHTTPTransport(
- url="https://ackee.discours.io/api", ssl=create_default_context(), headers=headers
+ url="https://ackee.discours.io/api",
+ ssl=create_default_context(),
+ headers=headers,
),
)
@@ -98,7 +98,7 @@ class ViewedStorage:
try:
for page in self.pages:
p = page["value"].split("?")[0]
- slug = p.split('discours.io/')[-1]
+ slug = p.split("discours.io/")[-1]
shouts[slug] = page["count"]
for slug in shouts.keys():
await ViewedStorage.increment(slug, shouts[slug])
@@ -162,14 +162,14 @@ class ViewedStorage:
self.by_topics[topic.slug][shout_slug] = self.by_shouts[shout_slug]
@staticmethod
- async def increment(shout_slug, amount=1, viewer='ackee'):
+ async def increment(shout_slug, amount=1, viewer="ackee"):
"""the only way to change views counter"""
self = ViewedStorage
async with self.lock:
# TODO optimize, currenty we execute 1 DB transaction per shout
with local_session() as session:
shout = session.query(Shout).where(Shout.slug == shout_slug).one()
- if viewer == 'old-discours':
+ if viewer == "old-discours":
# this is needed for old db migration
if shout.viewsOld == amount:
print(f"viewsOld amount: {amount}")
diff --git a/services/zine/gittask.py b/services/zine/gittask.py
index 31e55025..6c6ce440 100644
--- a/services/zine/gittask.py
+++ b/services/zine/gittask.py
@@ -1,8 +1,8 @@
+from pathlib import Path
+from settings import SHOUTS_REPO
+
import asyncio
import subprocess
-from pathlib import Path
-
-from settings import SHOUTS_REPO
class GitTask:
diff --git a/settings.py b/settings.py
index bd096081..f3da9952 100644
--- a/settings.py
+++ b/settings.py
@@ -31,4 +31,4 @@ SENTRY_DSN = environ.get("SENTRY_DSN")
SESSION_SECRET_KEY = environ.get("SESSION_SECRET_KEY") or "!secret"
# for local development
-DEV_SERVER_PID_FILE_NAME = 'dev-server.pid'
+DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
diff --git a/setup.cfg b/setup.cfg
old mode 100755
new mode 100644
index 588918a1..e3db2ef9
--- a/setup.cfg
+++ b/setup.cfg
@@ -9,15 +9,16 @@ force_alphabetical_sort = false
[tool:brunette]
# https://github.com/odwyersoftware/brunette
-line-length = 120
+line-length = 100
single-quotes = false
[flake8]
# https://github.com/PyCQA/flake8
exclude = .git,__pycache__,.mypy_cache,.vercel
-max-line-length = 120
-max-complexity = 15
+max-line-length = 100
+max-complexity = 10
select = B,C,E,F,W,T4,B9
+# FIXME
# E203: Whitespace before ':'
# E266: Too many leading '#' for block comment
# E501: Line too long (82 > 79 characters)
diff --git a/setup.cfg.bak b/setup.cfg.bak
new file mode 100644
index 00000000..588918a1
--- /dev/null
+++ b/setup.cfg.bak
@@ -0,0 +1,39 @@
+[isort]
+# https://github.com/PyCQA/isort
+line_length = 120
+multi_line_output = 3
+include_trailing_comma = true
+force_grid_wrap = 0
+use_parentheses = true
+force_alphabetical_sort = false
+
+[tool:brunette]
+# https://github.com/odwyersoftware/brunette
+line-length = 120
+single-quotes = false
+
+[flake8]
+# https://github.com/PyCQA/flake8
+exclude = .git,__pycache__,.mypy_cache,.vercel
+max-line-length = 120
+max-complexity = 15
+select = B,C,E,F,W,T4,B9
+# E203: Whitespace before ':'
+# E266: Too many leading '#' for block comment
+# E501: Line too long (82 > 79 characters)
+# E722: Do not use bare except, specify exception instead
+# W503: Line break occurred before a binary operator
+# F403: 'from module import *' used; unable to detect undefined names
+# C901: Function is too complex
+ignore = E203,E266,E501,E722,W503,F403,C901
+
+[mypy]
+# https://github.com/python/mypy
+ignore_missing_imports = true
+warn_return_any = false
+warn_unused_configs = true
+disallow_untyped_calls = true
+disallow_untyped_defs = true
+disallow_incomplete_defs = true
+[mypy-api.*]
+ignore_errors = true
diff --git a/validations/auth.py b/validations/auth.py
index 73b83079..59c49bd4 100644
--- a/validations/auth.py
+++ b/validations/auth.py
@@ -1,6 +1,5 @@
-from typing import Optional, Text
-
from pydantic import BaseModel
+from typing import Optional, Text
class AuthInput(BaseModel):
diff --git a/validations/inbox.py b/validations/inbox.py
index 58645dd9..d864ed67 100644
--- a/validations/inbox.py
+++ b/validations/inbox.py
@@ -1,6 +1,5 @@
-from typing import List, Optional, Text
-
from pydantic import BaseModel
+from typing import List, Optional, Text
class Message(BaseModel):
From b14294980597e4b1e79ad35d1d2c63fede8ef05d Mon Sep 17 00:00:00 2001
From: Kosta <47947996+dobrodob@users.noreply.github.com>
Date: Fri, 27 Oct 2023 00:07:35 +0300
Subject: [PATCH 08/27] Revert "Feature/lint"
---
.flake8 | 6 +-
.pre-commit-config.yaml | 21 +-
Procfile | 1 +
README.md | 8 +-
ai/preprocess.py | 26 ++-
alembic/env.py | 14 +-
alembic/versions/fe943b098418_init_alembic.py | 8 +-
auth/authenticate.py | 60 +++---
auth/credentials.py | 7 +-
auth/email.py | 27 ++-
auth/identity.py | 40 ++--
auth/jwtcodec.py | 25 ++-
auth/oauth.py | 6 +-
auth/tokenstorage.py | 9 +-
base/exceptions.py | 2 +-
base/orm.py | 14 +-
main.py | 51 ++---
migrate.sh | 1 +
migration/__init__.py | 70 +++---
migration/bson2json.py | 10 +-
migration/export.py | 49 +++--
migration/extract.py | 159 +++++++-------
migration/html2text/__init__.py | 79 +++++--
migration/html2text/cli.py | 13 +-
migration/html2text/utils.py | 22 +-
migration/tables/comments.py | 93 ++++----
migration/tables/content_items.py | 119 +++++------
migration/tables/remarks.py | 36 ++--
migration/tables/topics.py | 2 +-
migration/tables/users.py | 40 ++--
orm/__init__.py | 4 +-
orm/collection.py | 4 +-
orm/community.py | 19 +-
orm/notification.py | 7 +-
orm/rbac.py | 48 ++---
orm/reaction.py | 12 +-
orm/shout.py | 16 +-
orm/topic.py | 12 +-
orm/user.py | 20 +-
requirements-dev.txt | 2 -
requirements.txt | 3 +
resetdb.sh | 1 +
resolvers/__init__.py | 67 ++++++
resolvers/auth.py | 62 +++---
resolvers/create/editor.py | 83 +++-----
resolvers/create/migrate.py | 21 +-
resolvers/inbox/chats.py | 57 +++--
resolvers/inbox/load.py | 102 +++++----
resolvers/inbox/messages.py | 53 +++--
resolvers/inbox/search.py | 63 +++---
resolvers/notifications.py | 57 +++--
resolvers/upload.py | 46 ++--
resolvers/zine/following.py | 43 ++--
resolvers/zine/load.py | 163 +++++++-------
resolvers/zine/profile.py | 90 ++++----
resolvers/zine/reactions.py | 199 +++++++++---------
resolvers/zine/topics.py | 34 +--
server.py | 78 ++++---
services/following.py | 13 +-
services/main.py | 10 +-
.../notifications/notification_service.py | 122 +++++------
services/notifications/sse.py | 8 +-
services/search.py | 9 +-
services/stat/viewed.py | 79 ++++---
services/zine/gittask.py | 6 +-
settings.py | 7 +-
setup.cfg | 7 +-
setup.cfg.bak | 39 ----
validations/auth.py | 2 +-
validations/inbox.py | 2 +-
70 files changed, 1465 insertions(+), 1223 deletions(-)
create mode 100644 resolvers/__init__.py
mode change 100644 => 100755 setup.cfg
delete mode 100644 setup.cfg.bak
diff --git a/.flake8 b/.flake8
index 523cb30f..e82de95a 100644
--- a/.flake8
+++ b/.flake8
@@ -1,6 +1,6 @@
[flake8]
-ignore = E203
+ignore = E203,W504,W191,W503
exclude = .git,__pycache__,orm/rbac.py
-max-complexity = 15
-max-line-length = 100
+max-complexity = 10
+max-line-length = 108
indent-string = ' '
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 42569413..af489f3a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,7 +6,7 @@ exclude: |
)
default_language_version:
- python: python3.10
+ python: python3.8
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
@@ -17,23 +17,28 @@ repos:
- id: check-docstring-first
- id: check-json
- id: check-merge-conflict
+ - id: check-toml
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
-# - repo: https://github.com/timothycrosley/isort
-# rev: 5.12.0
-# hooks:
-# - id: isort
+ - repo: https://github.com/timothycrosley/isort
+ rev: 5.5.3
+ hooks:
+ - id: isort
- repo: https://github.com/ambv/black
- rev: 23.9.1
+ rev: 20.8b1
hooks:
- id: black
args:
- --line-length=100
+ - --skip-string-normalization
- - repo: https://github.com/PyCQA/flake8
- rev: 6.1.0
+ - repo: https://gitlab.com/pycqa/flake8
+ rev: 3.8.3
hooks:
- id: flake8
+ args:
+ - --max-line-length=100
+ - --disable=protected-access
diff --git a/Procfile b/Procfile
index ac9d762f..c5c1bfa8 100644
--- a/Procfile
+++ b/Procfile
@@ -1 +1,2 @@
web: python server.py
+
diff --git a/README.md b/README.md
index 6f57e39c..1a1ee0a4 100644
--- a/README.md
+++ b/README.md
@@ -35,13 +35,6 @@ pip install -r requirements.txt
python3 server.py dev
```
-# pre-commit hook
-
-```
-pip install -r requirements-dev.txt
-pre-commit install
-```
-
# How to do an authorized request
Put the header 'Authorization' with token from signIn query or registerUser mutation.
@@ -49,3 +42,4 @@ Put the header 'Authorization' with token from signIn query or registerUser muta
# How to debug Ackee
Set ACKEE_TOKEN var
+
diff --git a/ai/preprocess.py b/ai/preprocess.py
index 82d06f71..afd8dbd8 100644
--- a/ai/preprocess.py
+++ b/ai/preprocess.py
@@ -1,28 +1,28 @@
+import re
+import nltk
from bs4 import BeautifulSoup
from nltk.corpus import stopwords
from pymystem3 import Mystem
from string import punctuation
-
-import nltk
-import re
+from transformers import BertTokenizer
nltk.download("stopwords")
def get_clear_text(text):
- soup = BeautifulSoup(text, "html.parser")
+ soup = BeautifulSoup(text, 'html.parser')
# extract the plain text from the HTML document without tags
- clear_text = ""
+ clear_text = ''
for tag in soup.find_all():
- clear_text += tag.string or ""
+ clear_text += tag.string or ''
- clear_text = re.sub(pattern="[\u202F\u00A0\n]+", repl=" ", string=clear_text)
+ clear_text = re.sub(pattern='[\u202F\u00A0\n]+', repl=' ', string=clear_text)
# only words
- clear_text = re.sub(pattern="[^A-ZА-ЯЁ -]", repl="", string=clear_text, flags=re.IGNORECASE)
+ clear_text = re.sub(pattern='[^A-ZА-ЯЁ -]', repl='', string=clear_text, flags=re.IGNORECASE)
- clear_text = re.sub(pattern=r"\s+", repl=" ", string=clear_text)
+ clear_text = re.sub(pattern='\s+', repl=' ', string=clear_text)
clear_text = clear_text.lower()
@@ -30,11 +30,9 @@ def get_clear_text(text):
russian_stopwords = stopwords.words("russian")
tokens = mystem.lemmatize(clear_text)
- tokens = [
- token
- for token in tokens
- if token not in russian_stopwords and token != " " and token.strip() not in punctuation
- ]
+ tokens = [token for token in tokens if token not in russian_stopwords \
+ and token != " " \
+ and token.strip() not in punctuation]
clear_text = " ".join(tokens)
diff --git a/alembic/env.py b/alembic/env.py
index 58e3e200..c6d69a97 100644
--- a/alembic/env.py
+++ b/alembic/env.py
@@ -1,8 +1,11 @@
-from alembic import context
-from base.orm import Base
from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
from settings import DB_URL
-from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@@ -16,6 +19,7 @@ config.set_section_option(config.config_ini_section, "DB_URL", DB_URL)
if config.config_file_name is not None:
fileConfig(config.config_file_name)
+from base.orm import Base
target_metadata = [Base.metadata]
# other values from the config, defined by the needs of env.py,
@@ -62,7 +66,9 @@ def run_migrations_online() -> None:
)
with connectable.connect() as connection:
- context.configure(connection=connection, target_metadata=target_metadata)
+ context.configure(
+ connection=connection, target_metadata=target_metadata
+ )
with context.begin_transaction():
context.run_migrations()
diff --git a/alembic/versions/fe943b098418_init_alembic.py b/alembic/versions/fe943b098418_init_alembic.py
index 52796fea..4ec6d519 100644
--- a/alembic/versions/fe943b098418_init_alembic.py
+++ b/alembic/versions/fe943b098418_init_alembic.py
@@ -1,18 +1,18 @@
"""init alembic
Revision ID: fe943b098418
-Revises:
+Revises:
Create Date: 2023-08-19 01:37:57.031933
"""
from typing import Sequence, Union
-# import sqlalchemy as sa
+from alembic import op
+import sqlalchemy as sa
-# from alembic import op
# revision identifiers, used by Alembic.
-revision: str = "fe943b098418"
+revision: str = 'fe943b098418'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
diff --git a/auth/authenticate.py b/auth/authenticate.py
index 9e4c93fc..be4db2d2 100644
--- a/auth/authenticate.py
+++ b/auth/authenticate.py
@@ -1,70 +1,76 @@
-from auth.credentials import AuthCredentials, AuthUser
-from auth.tokenstorage import SessionToken
-from base.exceptions import OperationNotAllowed
-from base.orm import local_session
from functools import wraps
+from typing import Optional, Tuple
+
from graphql.type import GraphQLResolveInfo
-from orm.user import Role, User
-from settings import SESSION_TOKEN_HEADER
-from sqlalchemy.orm import exc, joinedload
+from sqlalchemy.orm import joinedload, exc
from starlette.authentication import AuthenticationBackend
from starlette.requests import HTTPConnection
-from typing import Optional, Tuple
+
+from auth.credentials import AuthCredentials, AuthUser
+from base.orm import local_session
+from orm.user import User, Role
+
+from settings import SESSION_TOKEN_HEADER
+from auth.tokenstorage import SessionToken
+from base.exceptions import OperationNotAllowed
class JWTAuthenticate(AuthenticationBackend):
async def authenticate(
self, request: HTTPConnection
) -> Optional[Tuple[AuthCredentials, AuthUser]]:
+
if SESSION_TOKEN_HEADER not in request.headers:
- return AuthCredentials(scopes={}), AuthUser(user_id=None, username="")
+ return AuthCredentials(scopes={}), AuthUser(user_id=None, username='')
token = request.headers.get(SESSION_TOKEN_HEADER)
if not token:
print("[auth.authenticate] no token in header %s" % SESSION_TOKEN_HEADER)
return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(
- user_id=None, username=""
+ user_id=None, username=''
)
- if len(token.split(".")) > 1:
+ if len(token.split('.')) > 1:
payload = await SessionToken.verify(token)
with local_session() as session:
try:
user = (
- session.query(User)
- .options(
+ session.query(User).options(
joinedload(User.roles).options(joinedload(Role.permissions)),
- joinedload(User.ratings),
- )
- .filter(User.id == payload.user_id)
- .one()
+ joinedload(User.ratings)
+ ).filter(
+ User.id == payload.user_id
+ ).one()
)
scopes = {} # TODO: integrate await user.get_permission()
return (
- AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
- AuthUser(user_id=user.id, username=""),
+ AuthCredentials(
+ user_id=payload.user_id,
+ scopes=scopes,
+ logged_in=True
+ ),
+ AuthUser(user_id=user.id, username=''),
)
except exc.NoResultFound:
pass
- return AuthCredentials(scopes={}, error_message=str("Invalid token")), AuthUser(
- user_id=None, username=""
- )
+ return AuthCredentials(scopes={}, error_message=str('Invalid token')), AuthUser(user_id=None, username='')
def login_required(func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
- # debug only
- # print('[auth.authenticate] login required for %r with info %r' % (func, info))
+ # print('[auth.authenticate] login required for %r with info %r' % (func, info)) # debug only
auth: AuthCredentials = info.context["request"].auth
# print(auth)
if not auth or not auth.logged_in:
# raise Unauthorized(auth.error_message or "Please login")
- return {"error": "Please login first"}
+ return {
+ "error": "Please login first"
+ }
return await func(parent, info, *args, **kwargs)
return wrap
@@ -73,9 +79,7 @@ def login_required(func):
def permission_required(resource, operation, func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
- print(
- "[auth.authenticate] permission_required for %r with info %r" % (func, info)
- ) # debug only
+ print('[auth.authenticate] permission_required for %r with info %r' % (func, info)) # debug only
auth: AuthCredentials = info.context["request"].auth
if not auth.logged_in:
raise OperationNotAllowed(auth.error_message or "Please login")
diff --git a/auth/credentials.py b/auth/credentials.py
index 856c2374..9045b7a4 100644
--- a/auth/credentials.py
+++ b/auth/credentials.py
@@ -1,6 +1,7 @@
-from pydantic import BaseModel
from typing import List, Optional, Text
+from pydantic import BaseModel
+
# from base.exceptions import Unauthorized
@@ -22,7 +23,9 @@ class AuthCredentials(BaseModel):
async def permissions(self) -> List[Permission]:
if self.user_id is None:
# raise Unauthorized("Please login first")
- return {"error": "Please login first"}
+ return {
+ "error": "Please login first"
+ }
else:
# TODO: implement permissions logix
print(self.user_id)
diff --git a/auth/email.py b/auth/email.py
index faa64725..7ca5d9bf 100644
--- a/auth/email.py
+++ b/auth/email.py
@@ -1,17 +1,20 @@
-from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN
-
import requests
-api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or "discours.io")
-noreply = "discours.io " % (MAILGUN_DOMAIN or "discours.io")
-lang_subject = {"ru": "Подтверждение почты", "en": "Confirm email"}
+from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN
+
+api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or 'discours.io')
+noreply = "discours.io " % (MAILGUN_DOMAIN or 'discours.io')
+lang_subject = {
+ "ru": "Подтверждение почты",
+ "en": "Confirm email"
+}
async def send_auth_email(user, token, lang="ru", template="email_confirmation"):
try:
to = "%s <%s>" % (user.name, user.email)
- if lang not in ["ru", "en"]:
- lang = "ru"
+ if lang not in ['ru', 'en']:
+ lang = 'ru'
subject = lang_subject.get(lang, lang_subject["en"])
template = template + "_" + lang
payload = {
@@ -19,12 +22,16 @@ async def send_auth_email(user, token, lang="ru", template="email_confirmation")
"to": to,
"subject": subject,
"template": template,
- "h:X-Mailgun-Variables": '{ "token": "%s" }' % token,
+ "h:X-Mailgun-Variables": "{ \"token\": \"%s\" }" % token
}
- print("[auth.email] payload: %r" % payload)
+ print('[auth.email] payload: %r' % payload)
# debug
# print('http://localhost:3000/?modal=auth&mode=confirm-email&token=%s' % token)
- response = requests.post(api_url, auth=("api", MAILGUN_API_KEY), data=payload)
+ response = requests.post(
+ api_url,
+ auth=("api", MAILGUN_API_KEY),
+ data=payload
+ )
response.raise_for_status()
except Exception as e:
print(e)
diff --git a/auth/identity.py b/auth/identity.py
index 6e89079f..e4b78040 100644
--- a/auth/identity.py
+++ b/auth/identity.py
@@ -1,14 +1,15 @@
-from auth.jwtcodec import JWTCodec
-from auth.tokenstorage import TokenStorage
-
-# from base.exceptions import InvalidPassword, InvalidToken
-from base.orm import local_session
from binascii import hexlify
from hashlib import sha256
+
from jwt import DecodeError, ExpiredSignatureError
-from orm import User
from passlib.hash import bcrypt
from sqlalchemy import or_
+
+from auth.jwtcodec import JWTCodec
+from auth.tokenstorage import TokenStorage
+# from base.exceptions import InvalidPassword, InvalidToken
+from base.orm import local_session
+from orm import User
from validations.auth import AuthInput
@@ -33,7 +34,6 @@ class Password:
Verify that password hash is equal to specified hash. Hash format:
$2a$10$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm
- # noqa: W605
\__/\/ \____________________/\_____________________________/
| | Salt Hash
| Cost
@@ -57,10 +57,14 @@ class Identity:
user = User(**orm_user.dict())
if not user.password:
# raise InvalidPassword("User password is empty")
- return {"error": "User password is empty"}
+ return {
+ "error": "User password is empty"
+ }
if not Password.verify(password, user.password):
# raise InvalidPassword("Wrong user password")
- return {"error": "Wrong user password"}
+ return {
+ "error": "Wrong user password"
+ }
return user
@staticmethod
@@ -83,22 +87,30 @@ class Identity:
@staticmethod
async def onetime(token: str) -> User:
try:
- print("[auth.identity] using one time token")
+ print('[auth.identity] using one time token')
payload = JWTCodec.decode(token)
if not await TokenStorage.exist(f"{payload.user_id}-{payload.username}-{token}"):
# raise InvalidToken("Login token has expired, please login again")
- return {"error": "Token has expired"}
+ return {
+ "error": "Token has expired"
+ }
except ExpiredSignatureError:
# raise InvalidToken("Login token has expired, please try again")
- return {"error": "Token has expired"}
+ return {
+ "error": "Token has expired"
+ }
except DecodeError:
# raise InvalidToken("token format error") from e
- return {"error": "Token format error"}
+ return {
+ "error": "Token format error"
+ }
with local_session() as session:
user = session.query(User).filter_by(id=payload.user_id).first()
if not user:
# raise Exception("user not exist")
- return {"error": "User does not exist"}
+ return {
+ "error": "User does not exist"
+ }
if not user.emailConfirmed:
user.emailConfirmed = True
session.commit()
diff --git a/auth/jwtcodec.py b/auth/jwtcodec.py
index 870ed540..ac561adb 100644
--- a/auth/jwtcodec.py
+++ b/auth/jwtcodec.py
@@ -1,9 +1,8 @@
-from base.exceptions import ExpiredToken, InvalidToken
from datetime import datetime, timezone
-from settings import JWT_ALGORITHM, JWT_SECRET_KEY
-from validations.auth import AuthInput, TokenPayload
-
import jwt
+from base.exceptions import ExpiredToken, InvalidToken
+from validations.auth import TokenPayload, AuthInput
+from settings import JWT_ALGORITHM, JWT_SECRET_KEY
class JWTCodec:
@@ -14,12 +13,12 @@ class JWTCodec:
"username": user.email or user.phone,
"exp": exp,
"iat": datetime.now(tz=timezone.utc),
- "iss": "discours",
+ "iss": "discours"
}
try:
return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
except Exception as e:
- print("[auth.jwtcodec] JWT encode error %r" % e)
+ print('[auth.jwtcodec] JWT encode error %r' % e)
@staticmethod
def decode(token: str, verify_exp: bool = True) -> TokenPayload:
@@ -34,18 +33,18 @@ class JWTCodec:
# "verify_signature": False
},
algorithms=[JWT_ALGORITHM],
- issuer="discours",
+ issuer="discours"
)
r = TokenPayload(**payload)
# print('[auth.jwtcodec] debug token %r' % r)
return r
except jwt.InvalidIssuedAtError:
- print("[auth.jwtcodec] invalid issued at: %r" % payload)
- raise ExpiredToken("check token issued time")
+ print('[auth.jwtcodec] invalid issued at: %r' % payload)
+ raise ExpiredToken('check token issued time')
except jwt.ExpiredSignatureError:
- print("[auth.jwtcodec] expired signature %r" % payload)
- raise ExpiredToken("check token lifetime")
+ print('[auth.jwtcodec] expired signature %r' % payload)
+ raise ExpiredToken('check token lifetime')
except jwt.InvalidTokenError:
- raise InvalidToken("token is not valid")
+ raise InvalidToken('token is not valid')
except jwt.InvalidSignatureError:
- raise InvalidToken("token is not valid")
+ raise InvalidToken('token is not valid')
diff --git a/auth/oauth.py b/auth/oauth.py
index 89695c72..54b5f11a 100644
--- a/auth/oauth.py
+++ b/auth/oauth.py
@@ -1,8 +1,8 @@
+from authlib.integrations.starlette_client import OAuth
+from starlette.responses import RedirectResponse
from auth.identity import Identity
from auth.tokenstorage import TokenStorage
-from authlib.integrations.starlette_client import OAuth
-from settings import FRONTEND_URL, OAUTH_CLIENTS
-from starlette.responses import RedirectResponse
+from settings import OAUTH_CLIENTS, FRONTEND_URL
oauth = OAuth()
diff --git a/auth/tokenstorage.py b/auth/tokenstorage.py
index 79a1a9b6..c61aa848 100644
--- a/auth/tokenstorage.py
+++ b/auth/tokenstorage.py
@@ -1,8 +1,9 @@
-from auth.jwtcodec import JWTCodec
-from base.redis import redis
from datetime import datetime, timedelta, timezone
-from settings import ONETIME_TOKEN_LIFE_SPAN, SESSION_TOKEN_LIFE_SPAN
+
+from auth.jwtcodec import JWTCodec
from validations.auth import AuthInput
+from base.redis import redis
+from settings import SESSION_TOKEN_LIFE_SPAN, ONETIME_TOKEN_LIFE_SPAN
async def save(token_key, life_span, auto_delete=True):
@@ -34,7 +35,7 @@ class SessionToken:
class TokenStorage:
@staticmethod
async def get(token_key):
- print("[tokenstorage.get] " + token_key)
+ print('[tokenstorage.get] ' + token_key)
# 2041-user@domain.zn-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoyMDQxLCJ1c2VybmFtZSI6ImFudG9uLnJld2luK3Rlc3QtbG9hZGNoYXRAZ21haWwuY29tIiwiZXhwIjoxNjcxNzgwNjE2LCJpYXQiOjE2NjkxODg2MTYsImlzcyI6ImRpc2NvdXJzIn0.Nml4oV6iMjMmc6xwM7lTKEZJKBXvJFEIZ-Up1C1rITQ
return await redis.execute("GET", token_key)
diff --git a/base/exceptions.py b/base/exceptions.py
index 2cf7bdeb..1f3344e7 100644
--- a/base/exceptions.py
+++ b/base/exceptions.py
@@ -1,7 +1,7 @@
from graphql.error import GraphQLError
-# TODO: remove traceback from logs for defined exceptions
+# TODO: remove traceback from logs for defined exceptions
class BaseHttpException(GraphQLError):
code = 500
diff --git a/base/orm.py b/base/orm.py
index 0ebb8de7..8d2e65ad 100644
--- a/base/orm.py
+++ b/base/orm.py
@@ -1,11 +1,15 @@
-from settings import DB_URL
-from sqlalchemy import Column, create_engine, Integer
+from typing import TypeVar, Any, Dict, Generic, Callable
+
+from sqlalchemy import create_engine, Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
-from typing import Any, Callable, Dict, Generic, TypeVar
-engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
+from settings import DB_URL
+
+engine = create_engine(
+ DB_URL, echo=False, pool_size=10, max_overflow=20
+)
T = TypeVar("T")
@@ -43,7 +47,7 @@ class Base(declarative_base()):
def update(self, input):
column_names = self.__table__.columns.keys()
- for name, value in input.items():
+ for (name, value) in input.items():
if name in column_names:
setattr(self, name, value)
diff --git a/main.py b/main.py
index 8c4a7670..6bb17a86 100644
--- a/main.py
+++ b/main.py
@@ -1,29 +1,28 @@
+import asyncio
+import os
+from importlib import import_module
+from os.path import exists
from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
-from auth.authenticate import JWTAuthenticate
-from auth.oauth import oauth_authorize, oauth_login
-from base.redis import redis
-from base.resolvers import resolvers
-from importlib import import_module
-from orm import init_tables
-from os.path import exists
-from resolvers.auth import confirm_email_handler
-from resolvers.upload import upload_handler
-from services.main import storages_init
-from services.notifications.notification_service import notification_service
-from services.notifications.sse import sse_subscribe_handler
-from services.stat.viewed import ViewedStorage
-
-# from services.zine.gittask import GitTask
-from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route
+from orm import init_tables
-import asyncio
-import os
+from auth.authenticate import JWTAuthenticate
+from auth.oauth import oauth_login, oauth_authorize
+from base.redis import redis
+from base.resolvers import resolvers
+from resolvers.auth import confirm_email_handler
+from resolvers.upload import upload_handler
+from services.main import storages_init
+from services.notifications.notification_service import notification_service
+from services.stat.viewed import ViewedStorage
+# from services.zine.gittask import GitTask
+from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY
+from services.notifications.sse import sse_subscribe_handler
import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers) # type: ignore
@@ -47,10 +46,9 @@ async def start_up():
try:
import sentry_sdk
-
sentry_sdk.init(SENTRY_DSN)
except Exception as e:
- print("[sentry] init error")
+ print('[sentry] init error')
print(e)
@@ -59,7 +57,7 @@ async def dev_start_up():
await redis.connect()
return
else:
- with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
+ with open(DEV_SERVER_PID_FILE_NAME, 'w', encoding='utf-8') as f:
f.write(str(os.getpid()))
await start_up()
@@ -74,7 +72,7 @@ routes = [
Route("/oauth/{provider}", endpoint=oauth_login),
Route("/oauth-authorize", endpoint=oauth_authorize),
Route("/confirm/{token}", endpoint=confirm_email_handler),
- Route("/upload", endpoint=upload_handler, methods=["POST"]),
+ Route("/upload", endpoint=upload_handler, methods=['POST']),
Route("/subscribe/{user_id}", endpoint=sse_subscribe_handler),
]
@@ -84,7 +82,9 @@ app = Starlette(
middleware=middleware,
routes=routes,
)
-app.mount("/", GraphQL(schema))
+app.mount("/", GraphQL(
+ schema
+))
dev_app = Starlette(
debug=True,
@@ -93,4 +93,7 @@ dev_app = Starlette(
middleware=middleware,
routes=routes,
)
-dev_app.mount("/", GraphQL(schema, debug=True))
+dev_app.mount("/", GraphQL(
+ schema,
+ debug=True
+))
diff --git a/migrate.sh b/migrate.sh
index f63aac19..2c1189da 100644
--- a/migrate.sh
+++ b/migrate.sh
@@ -16,3 +16,4 @@ echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'
+
diff --git a/migration/__init__.py b/migration/__init__.py
index bf1ba8d8..468fa886 100644
--- a/migration/__init__.py
+++ b/migration/__init__.py
@@ -1,25 +1,24 @@
""" cmd managed migration """
+import asyncio
+import gc
+import json
+import sys
from datetime import datetime, timezone
+
+import bs4
+
from migration.export import export_mdx
from migration.tables.comments import migrate as migrateComment
from migration.tables.comments import migrate_2stage as migrateComment_2stage
from migration.tables.content_items import get_shout_slug
from migration.tables.content_items import migrate as migrateShout
-
-# from migration.tables.remarks import migrate as migrateRemark
+from migration.tables.remarks import migrate as migrateRemark
from migration.tables.topics import migrate as migrateTopic
-from migration.tables.users import migrate as migrateUser
+from migration.tables.users import migrate as migrateUser, post_migrate as users_post_migrate
from migration.tables.users import migrate_2stage as migrateUser_2stage
-from migration.tables.users import post_migrate as users_post_migrate
from orm import init_tables
from orm.reaction import Reaction
-import asyncio
-import bs4
-import gc
-import json
-import sys
-
TODAY = datetime.strftime(datetime.now(tz=timezone.utc), "%Y%m%d")
OLD_DATE = "2016-03-05 22:22:00.350000"
@@ -64,8 +63,16 @@ async def topics_handle(storage):
del storage["topics"]["by_slug"][oldslug]
storage["topics"]["by_oid"][oid] = storage["topics"]["by_slug"][newslug]
print("[migration] " + str(counter) + " topics migrated")
- print("[migration] " + str(len(storage["topics"]["by_oid"].values())) + " topics by oid")
- print("[migration] " + str(len(storage["topics"]["by_slug"].values())) + " topics by slug")
+ print(
+ "[migration] "
+ + str(len(storage["topics"]["by_oid"].values()))
+ + " topics by oid"
+ )
+ print(
+ "[migration] "
+ + str(len(storage["topics"]["by_slug"].values()))
+ + " topics by slug"
+ )
async def shouts_handle(storage, args):
@@ -110,10 +117,9 @@ async def shouts_handle(storage, args):
# print main counter
counter += 1
- print(
- "[migration] shouts_handle %d: %s @%s"
- % ((counter + 1), shout_dict["slug"], author["slug"])
- )
+ print('[migration] shouts_handle %d: %s @%s' % (
+ (counter + 1), shout_dict["slug"], author["slug"]
+ ))
b = bs4.BeautifulSoup(shout_dict["body"], "html.parser")
texts = [shout_dict["title"].lower().replace(r"[^а-яА-Яa-zA-Z]", "")]
@@ -132,13 +138,13 @@ async def shouts_handle(storage, args):
print("[migration] " + str(anonymous_author) + " authored by @anonymous")
-# async def remarks_handle(storage):
-# print("[migration] comments")
-# c = 0
-# for entry_remark in storage["remarks"]["data"]:
-# remark = await migrateRemark(entry_remark, storage)
-# c += 1
-# print("[migration] " + str(c) + " remarks migrated")
+async def remarks_handle(storage):
+ print("[migration] comments")
+ c = 0
+ for entry_remark in storage["remarks"]["data"]:
+ remark = await migrateRemark(entry_remark, storage)
+ c += 1
+ print("[migration] " + str(c) + " remarks migrated")
async def comments_handle(storage):
@@ -149,9 +155,9 @@ async def comments_handle(storage):
for oldcomment in storage["reactions"]["data"]:
if not oldcomment.get("deleted"):
reaction = await migrateComment(oldcomment, storage)
- if isinstance(reaction, str):
+ if type(reaction) == str:
missed_shouts[reaction] = oldcomment
- elif isinstance(reaction, Reaction):
+ elif type(reaction) == Reaction:
reaction = reaction.dict()
rid = reaction["id"]
oid = reaction["oid"]
@@ -208,7 +214,9 @@ def data_load():
tags_data = json.loads(open("migration/data/tags.json").read())
storage["topics"]["tags"] = tags_data
print("[migration.load] " + str(len(tags_data)) + " tags ")
- cats_data = json.loads(open("migration/data/content_item_categories.json").read())
+ cats_data = json.loads(
+ open("migration/data/content_item_categories.json").read()
+ )
storage["topics"]["cats"] = cats_data
print("[migration.load] " + str(len(cats_data)) + " cats ")
comments_data = json.loads(open("migration/data/comments.json").read())
@@ -227,7 +235,11 @@ def data_load():
storage["users"]["by_oid"][x["_id"]] = x
# storage['users']['by_slug'][x['slug']] = x
# no user.slug yet
- print("[migration.load] " + str(len(storage["users"]["by_oid"].keys())) + " users by oid")
+ print(
+ "[migration.load] "
+ + str(len(storage["users"]["by_oid"].keys()))
+ + " users by oid"
+ )
for x in tags_data:
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
@@ -235,7 +247,9 @@ def data_load():
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
print(
- "[migration.load] " + str(len(storage["topics"]["by_slug"].keys())) + " topics by slug"
+ "[migration.load] "
+ + str(len(storage["topics"]["by_slug"].keys()))
+ + " topics by slug"
)
for item in content_data:
slug = get_shout_slug(item)
diff --git a/migration/bson2json.py b/migration/bson2json.py
index 66507791..03effe19 100644
--- a/migration/bson2json.py
+++ b/migration/bson2json.py
@@ -1,9 +1,9 @@
-from .utils import DateTimeEncoder
+import json
+import os
import bson
import gc
-import json
-import os
+from .utils import DateTimeEncoder
def json_tables():
@@ -15,10 +15,10 @@ def json_tables():
"email_subscriptions": [],
"users": [],
"comments": [],
- "remarks": [],
+ "remarks": []
}
for table in data.keys():
- print("[migration] bson2json for " + table)
+ print('[migration] bson2json for ' + table)
gc.collect()
lc = []
bs = open("dump/discours/" + table + ".bson", "rb").read()
diff --git a/migration/export.py b/migration/export.py
index 4105a220..102cfb14 100644
--- a/migration/export.py
+++ b/migration/export.py
@@ -1,10 +1,11 @@
-from .extract import extract_html, extract_media
-from .utils import DateTimeEncoder
+import json
+import os
from datetime import datetime, timezone
import frontmatter
-import json
-import os
+
+from .extract import extract_html, extract_media
+from .utils import DateTimeEncoder
OLD_DATE = "2016-03-05 22:22:00.350000"
EXPORT_DEST = "../discoursio-web/data/"
@@ -70,29 +71,47 @@ def export_slug(slug, storage):
def export_email_subscriptions():
- email_subscriptions_data = json.loads(open("migration/data/email_subscriptions.json").read())
+ email_subscriptions_data = json.loads(
+ open("migration/data/email_subscriptions.json").read()
+ )
for data in email_subscriptions_data:
# TODO: migrate to mailgun list manually
# migrate_email_subscription(data)
pass
- print("[migration] " + str(len(email_subscriptions_data)) + " email subscriptions exported")
+ print(
+ "[migration] "
+ + str(len(email_subscriptions_data))
+ + " email subscriptions exported"
+ )
def export_shouts(storage):
# update what was just migrated or load json again
if len(storage["users"]["by_slugs"].keys()) == 0:
- storage["users"]["by_slugs"] = json.loads(open(EXPORT_DEST + "authors.json").read())
- print("[migration] " + str(len(storage["users"]["by_slugs"].keys())) + " exported authors ")
- if len(storage["shouts"]["by_slugs"].keys()) == 0:
- storage["shouts"]["by_slugs"] = json.loads(open(EXPORT_DEST + "articles.json").read())
+ storage["users"]["by_slugs"] = json.loads(
+ open(EXPORT_DEST + "authors.json").read()
+ )
print(
- "[migration] " + str(len(storage["shouts"]["by_slugs"].keys())) + " exported articles "
+ "[migration] "
+ + str(len(storage["users"]["by_slugs"].keys()))
+ + " exported authors "
+ )
+ if len(storage["shouts"]["by_slugs"].keys()) == 0:
+ storage["shouts"]["by_slugs"] = json.loads(
+ open(EXPORT_DEST + "articles.json").read()
+ )
+ print(
+ "[migration] "
+ + str(len(storage["shouts"]["by_slugs"].keys()))
+ + " exported articles "
)
for slug in storage["shouts"]["by_slugs"].keys():
export_slug(slug, storage)
-def export_json(export_articles={}, export_authors={}, export_topics={}, export_comments={}):
+def export_json(
+ export_articles={}, export_authors={}, export_topics={}, export_comments={}
+):
open(EXPORT_DEST + "authors.json", "w").write(
json.dumps(
export_authors,
@@ -133,4 +152,8 @@ def export_json(export_articles={}, export_authors={}, export_topics={}, export_
ensure_ascii=False,
)
)
- print("[migration] " + str(len(export_comments.items())) + " exported articles with comments")
+ print(
+ "[migration] "
+ + str(len(export_comments.items()))
+ + " exported articles with comments"
+ )
diff --git a/migration/extract.py b/migration/extract.py
index eca8f8d0..fcd293e6 100644
--- a/migration/extract.py
+++ b/migration/extract.py
@@ -1,10 +1,9 @@
-from bs4 import BeautifulSoup
-
import base64
import os
import re
+import uuid
-# import uuid
+from bs4 import BeautifulSoup
TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
@@ -28,40 +27,37 @@ def replace_tooltips(body):
return newbody
-# def extract_footnotes(body, shout_dict):
-# parts = body.split("&&&")
-# lll = len(parts)
-# newparts = list(parts)
-# placed = False
-# if lll & 1:
-# if lll > 1:
-# i = 1
-# print("[extract] found %d footnotes in body" % (lll - 1))
-# for part in parts[1:]:
-# if i & 1:
-# placed = True
-# if 'a class="footnote-url" href=' in part:
-# print("[extract] footnote: " + part)
-# fn = 'a class="footnote-url" href="'
-# # exxtracted_link = part.split(fn, 1)[1].split('"', 1)[0]
-# extracted_body = part.split(fn, 1)[1].split(">", 1)[1].split("", 1)[0]
-# print("[extract] footnote link: " + extracted_link)
-# with local_session() as session:
-# Reaction.create(
-# {
-# "shout": shout_dict["id"],
-# "kind": ReactionKind.FOOTNOTE,
-# "body": extracted_body,
-# "range": str(body.index(fn + link) - len("<"))
-# + ":"
-# + str(body.index(extracted_body) + len("")),
-# }
-# )
-# newparts[i] = "ℹ️"
-# else:
-# newparts[i] = part
-# i += 1
-# return ("".join(newparts), placed)
+
+def extract_footnotes(body, shout_dict):
+ parts = body.split("&&&")
+ lll = len(parts)
+ newparts = list(parts)
+ placed = False
+ if lll & 1:
+ if lll > 1:
+ i = 1
+ print("[extract] found %d footnotes in body" % (lll - 1))
+ for part in parts[1:]:
+ if i & 1:
+ placed = True
+ if 'a class="footnote-url" href=' in part:
+ print("[extract] footnote: " + part)
+ fn = 'a class="footnote-url" href="'
+ exxtracted_link = part.split(fn, 1)[1].split('"', 1)[0]
+ extracted_body = part.split(fn, 1)[1].split('>', 1)[1].split('', 1)[0]
+ print("[extract] footnote link: " + extracted_link)
+ with local_session() as session:
+ Reaction.create({
+ "shout": shout_dict['id'],
+ "kind": ReactionKind.FOOTNOTE,
+ "body": extracted_body,
+ "range": str(body.index(fn + link) - len('<')) + ':' + str(body.index(extracted_body) + len(''))
+ })
+ newparts[i] = "ℹ️"
+ else:
+ newparts[i] = part
+ i += 1
+ return ("".join(newparts), placed)
def place_tooltips(body):
@@ -80,7 +76,9 @@ def place_tooltips(body):
print("[extract] footnote: " + part)
fn = 'a class="footnote-url" href="'
link = part.split(fn, 1)[1].split('"', 1)[0]
- extracted_part = part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
+ extracted_part = (
+ part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
+ )
newparts[i] = (
" (i + 1)
else "".join(newparts)
)
@@ -230,6 +237,7 @@ di = "data:image"
def extract_md_images(body, prefix):
+ newbody = ""
body = (
body.replace("\n! [](" + di, "\n 
.replace("\n[](" + di, "\n
@@ -237,10 +245,10 @@ def extract_md_images(body, prefix):
)
parts = body.split(di)
if len(parts) > 1:
- new_body = extract_dataimages(parts, prefix)
+ newbody = extract_dataimages(parts, prefix)
else:
- new_body = body
- return new_body
+ newbody = body
+ return newbody
def cleanup_md(body):
@@ -263,28 +271,29 @@ def cleanup_md(body):
return newbody
-# def extract_md(body, shout_dict=None):
-# newbody = body
-# if newbody:
-# newbody = cleanup_md(newbody)
-# if not newbody:
-# raise Exception("cleanup error")
-#
-# if shout_dict:
-# uid = shout_dict["id"] or uuid.uuid4()
-# newbody = extract_md_images(newbody, uid)
-# if not newbody:
-# raise Exception("extract_images error")
-#
-# newbody, placed = extract_footnotes(body, shout_dict)
-# if not newbody:
-# raise Exception("extract_footnotes error")
-#
-# return newbody
+def extract_md(body, shout_dict = None):
+ newbody = body
+ if newbody:
+ newbody = cleanup_md(newbody)
+ if not newbody:
+ raise Exception("cleanup error")
+
+ if shout_dict:
+
+ uid = shout_dict['id'] or uuid.uuid4()
+ newbody = extract_md_images(newbody, uid)
+ if not newbody:
+ raise Exception("extract_images error")
+
+ newbody, placed = extract_footnotes(body, shout_dict)
+ if not newbody:
+ raise Exception("extract_footnotes error")
+
+ return newbody
def extract_media(entry):
- """normalized media extraction method"""
+ ''' normalized media extraction method '''
# media [ { title pic url body } ]}
kind = entry.get("type")
if not kind:
@@ -314,7 +323,12 @@ def extract_media(entry):
url = "https://vimeo.com/" + m["vimeoId"]
# body
body = m.get("body") or m.get("literatureBody") or ""
- media.append({"url": url, "pic": pic, "title": title, "body": body})
+ media.append({
+ "url": url,
+ "pic": pic,
+ "title": title,
+ "body": body
+ })
return media
@@ -384,7 +398,9 @@ def cleanup_html(body: str) -> str:
r"\s*
",
r"\s*",
]
- regex_replace = {r"
\s*": ""}
+ regex_replace = {
+ r"
\s*": ""
+ }
changed = True
while changed:
# we need several iterations to clean nested tags this way
@@ -398,15 +414,16 @@ def cleanup_html(body: str) -> str:
changed = True
return new_body
-
-def extract_html(entry, cleanup=False):
- body_orig = (entry.get("body") or "").replace(r"\(", "(").replace(r"\)", ")")
+def extract_html(entry, shout_id = None, cleanup=False):
+ body_orig = (entry.get("body") or "").replace('\(', '(').replace('\)', ')')
if cleanup:
# we do that before bs parsing to catch the invalid html
body_clean = cleanup_html(body_orig)
if body_clean != body_orig:
print(f"[migration] html cleaned for slug {entry.get('slug', None)}")
body_orig = body_clean
+ if shout_id:
+ extract_footnotes(body_orig, shout_id)
body_html = str(BeautifulSoup(body_orig, features="html.parser"))
if cleanup:
# we do that after bs parsing because it can add dummy tags
diff --git a/migration/html2text/__init__.py b/migration/html2text/__init__.py
index c99afc59..1090025c 100644
--- a/migration/html2text/__init__.py
+++ b/migration/html2text/__init__.py
@@ -1,5 +1,13 @@
"""html2text: Turn HTML into equivalent Markdown-structured text."""
+import html.entities
+import html.parser
+import re
+import string
+import urllib.parse as urlparse
+from textwrap import wrap
+from typing import Dict, List, Optional, Tuple, Union
+
from . import config
from .elements import AnchorElement, ListElement
from .typing import OutCallback
@@ -18,14 +26,6 @@ from .utils import (
skipwrap,
unifiable_n,
)
-from textwrap import wrap
-from typing import Dict, List, Optional, Tuple, Union
-
-import html.entities
-import html.parser
-import re
-import string
-import urllib.parse as urlparse
__version__ = (2020, 1, 16)
@@ -119,7 +119,9 @@ class HTML2Text(html.parser.HTMLParser):
self.lastWasList = False
self.style = 0
self.style_def = {} # type: Dict[str, Dict[str, str]]
- self.tag_stack = [] # type: List[Tuple[str, Dict[str, Optional[str]], Dict[str, str]]]
+ self.tag_stack = (
+ []
+ ) # type: List[Tuple[str, Dict[str, Optional[str]], Dict[str, str]]]
self.emphasis = 0
self.drop_white_space = 0
self.inheader = False
@@ -298,7 +300,9 @@ class HTML2Text(html.parser.HTMLParser):
if strikethrough:
self.quiet -= 1
- def handle_tag(self, tag: str, attrs: Dict[str, Optional[str]], start: bool) -> None:
+ def handle_tag(
+ self, tag: str, attrs: Dict[str, Optional[str]], start: bool
+ ) -> None:
self.current_tag = tag
if self.tag_callback is not None:
@@ -329,7 +333,9 @@ class HTML2Text(html.parser.HTMLParser):
tag_style = element_style(attrs, self.style_def, parent_style)
self.tag_stack.append((tag, attrs, tag_style))
else:
- dummy, attrs, tag_style = self.tag_stack.pop() if self.tag_stack else (None, {}, {})
+ dummy, attrs, tag_style = (
+ self.tag_stack.pop() if self.tag_stack else (None, {}, {})
+ )
if self.tag_stack:
parent_style = self.tag_stack[-1][2]
@@ -379,7 +385,11 @@ class HTML2Text(html.parser.HTMLParser):
):
self.o("`") # NOTE: same as
self.span_highlight = True
- elif self.current_class == "lead" and not self.inheader and not self.span_highlight:
+ elif (
+ self.current_class == "lead"
+ and not self.inheader
+ and not self.span_highlight
+ ):
# self.o("==") # NOTE: CriticMarkup {==
self.span_lead = True
else:
@@ -469,7 +479,11 @@ class HTML2Text(html.parser.HTMLParser):
and not self.span_lead
and not self.span_highlight
):
- if start and self.preceding_data and self.preceding_data[-1] == self.strong_mark[0]:
+ if (
+ start
+ and self.preceding_data
+ and self.preceding_data[-1] == self.strong_mark[0]
+ ):
strong = " " + self.strong_mark
self.preceding_data += " "
else:
@@ -534,8 +548,13 @@ class HTML2Text(html.parser.HTMLParser):
"href" in attrs
and not attrs["href"].startswith("#_ftn")
and attrs["href"] is not None
- and not (self.skip_internal_links and attrs["href"].startswith("#"))
- and not (self.ignore_mailto_links and attrs["href"].startswith("mailto:"))
+ and not (
+ self.skip_internal_links and attrs["href"].startswith("#")
+ )
+ and not (
+ self.ignore_mailto_links
+ and attrs["href"].startswith("mailto:")
+ )
):
self.astack.append(attrs)
self.maybe_automatic_link = attrs["href"]
@@ -619,7 +638,9 @@ class HTML2Text(html.parser.HTMLParser):
self.o("![" + escape_md(alt) + "]")
if self.inline_links:
href = attrs.get("href") or ""
- self.o("(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")")
+ self.o(
+ "(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")"
+ )
else:
i = self.previousIndex(attrs)
if i is not None:
@@ -675,7 +696,9 @@ class HTML2Text(html.parser.HTMLParser):
# WARNING: does not line up - s > 9 correctly.
parent_list = None
for list in self.list:
- self.o(" " if parent_list == "ol" and list.name == "ul" else " ")
+ self.o(
+ " " if parent_list == "ol" and list.name == "ul" else " "
+ )
parent_list = list.name
if li.name == "ul":
@@ -764,7 +787,9 @@ class HTML2Text(html.parser.HTMLParser):
self.pbr()
self.br_toggle = " "
- def o(self, data: str, puredata: bool = False, force: Union[bool, str] = False) -> None:
+ def o(
+ self, data: str, puredata: bool = False, force: Union[bool, str] = False
+ ) -> None:
"""
Deal with indentation and whitespace
"""
@@ -839,7 +864,9 @@ class HTML2Text(html.parser.HTMLParser):
self.out(" ")
self.space = False
- if self.a and ((self.p_p == 2 and self.links_each_paragraph) or force == "end"):
+ if self.a and (
+ (self.p_p == 2 and self.links_each_paragraph) or force == "end"
+ ):
if force == "end":
self.out("\n")
@@ -898,7 +925,11 @@ class HTML2Text(html.parser.HTMLParser):
if self.maybe_automatic_link is not None:
href = self.maybe_automatic_link
- if href == data and self.absolute_url_matcher.match(href) and self.use_automatic_links:
+ if (
+ href == data
+ and self.absolute_url_matcher.match(href)
+ and self.use_automatic_links
+ ):
self.o("<" + data + ">")
self.empty_link = False
return
@@ -969,7 +1000,9 @@ class HTML2Text(html.parser.HTMLParser):
self.inline_links = False
for para in text.split("\n"):
if len(para) > 0:
- if not skipwrap(para, self.wrap_links, self.wrap_list_items, self.wrap_tables):
+ if not skipwrap(
+ para, self.wrap_links, self.wrap_list_items, self.wrap_tables
+ ):
indent = ""
if para.startswith(" " + self.ul_item_mark):
# list item continuation: add a double indent to the
@@ -1010,7 +1043,9 @@ class HTML2Text(html.parser.HTMLParser):
return result
-def html2text(html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH) -> str:
+def html2text(
+ html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH
+) -> str:
h = html.strip() or ""
if h:
h = HTML2Text(baseurl=baseurl, bodywidth=bodywidth)
diff --git a/migration/html2text/cli.py b/migration/html2text/cli.py
index f6cf3c57..dbaba28b 100644
--- a/migration/html2text/cli.py
+++ b/migration/html2text/cli.py
@@ -1,8 +1,8 @@
-from . import __version__, config, HTML2Text
-
import argparse
import sys
+from . import HTML2Text, __version__, config
+
# noinspection DuplicatedCode
def main() -> None:
@@ -117,7 +117,10 @@ def main() -> None:
dest="images_with_size",
action="store_true",
default=config.IMAGES_WITH_SIZE,
- help=("Write image tags with height and width attrs as raw html to retain " "dimensions"),
+ help=(
+ "Write image tags with height and width attrs as raw html to retain "
+ "dimensions"
+ ),
)
p.add_argument(
"-g",
@@ -257,7 +260,9 @@ def main() -> None:
default=config.CLOSE_QUOTE,
help="The character used to close quotes",
)
- p.add_argument("--version", action="version", version=".".join(map(str, __version__)))
+ p.add_argument(
+ "--version", action="version", version=".".join(map(str, __version__))
+ )
p.add_argument("filename", nargs="?")
p.add_argument("encoding", nargs="?", default="utf-8")
args = p.parse_args()
diff --git a/migration/html2text/utils.py b/migration/html2text/utils.py
index 545bbd17..1cf22b52 100644
--- a/migration/html2text/utils.py
+++ b/migration/html2text/utils.py
@@ -1,10 +1,12 @@
-from . import config
+import html.entities
from typing import Dict, List, Optional
-import html.entities
+from . import config
unifiable_n = {
- html.entities.name2codepoint[k]: v for k, v in config.UNIFIABLE.items() if k != "nbsp"
+ html.entities.name2codepoint[k]: v
+ for k, v in config.UNIFIABLE.items()
+ if k != "nbsp"
}
@@ -154,7 +156,9 @@ def list_numbering_start(attrs: Dict[str, Optional[str]]) -> int:
return 0
-def skipwrap(para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool) -> bool:
+def skipwrap(
+ para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool
+) -> bool:
# If it appears to contain a link
# don't wrap
if not wrap_links and config.RE_LINK.search(para):
@@ -232,7 +236,9 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]:
max_width += [len(x) + right_margin for x in cols[-(num_cols - max_cols) :]]
max_cols = num_cols
- max_width = [max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)]
+ max_width = [
+ max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)
+ ]
# reformat
new_lines = []
@@ -241,13 +247,15 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]:
if set(line.strip()) == set("-|"):
filler = "-"
new_cols = [
- x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
+ x.rstrip() + (filler * (M - len(x.rstrip())))
+ for x, M in zip(cols, max_width)
]
new_lines.append("|-" + "|".join(new_cols) + "|")
else:
filler = " "
new_cols = [
- x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
+ x.rstrip() + (filler * (M - len(x.rstrip())))
+ for x, M in zip(cols, max_width)
]
new_lines.append("| " + "|".join(new_cols) + "|")
return new_lines
diff --git a/migration/tables/comments.py b/migration/tables/comments.py
index 13d2809d..82e32924 100644
--- a/migration/tables/comments.py
+++ b/migration/tables/comments.py
@@ -1,50 +1,65 @@
-from base.orm import local_session
from datetime import datetime, timezone
+
from dateutil.parser import parse as date_parse
+
+from base.orm import local_session
from migration.html2text import html2text
from orm.reaction import Reaction, ReactionKind
-from orm.shout import Shout, ShoutReactionsFollower
+from orm.shout import ShoutReactionsFollower
from orm.topic import TopicFollower
from orm.user import User
+from orm.shout import Shout
ts = datetime.now(tz=timezone.utc)
def auto_followers(session, topics, reaction_dict):
# creating shout's reactions following for reaction author
- following1 = (
- session.query(ShoutReactionsFollower)
- .where(ShoutReactionsFollower.follower == reaction_dict["createdBy"])
- .filter(ShoutReactionsFollower.shout == reaction_dict["shout"])
- .first()
- )
+ following1 = session.query(
+ ShoutReactionsFollower
+ ).where(
+ ShoutReactionsFollower.follower == reaction_dict["createdBy"]
+ ).filter(
+ ShoutReactionsFollower.shout == reaction_dict["shout"]
+ ).first()
if not following1:
following1 = ShoutReactionsFollower.create(
- follower=reaction_dict["createdBy"], shout=reaction_dict["shout"], auto=True
+ follower=reaction_dict["createdBy"],
+ shout=reaction_dict["shout"],
+ auto=True
)
session.add(following1)
# creating topics followings for reaction author
for t in topics:
- tf = (
- session.query(TopicFollower)
- .where(TopicFollower.follower == reaction_dict["createdBy"])
- .filter(TopicFollower.topic == t["id"])
- .first()
- )
+ tf = session.query(
+ TopicFollower
+ ).where(
+ TopicFollower.follower == reaction_dict["createdBy"]
+ ).filter(
+ TopicFollower.topic == t['id']
+ ).first()
if not tf:
topic_following = TopicFollower.create(
- follower=reaction_dict["createdBy"], topic=t["id"], auto=True
+ follower=reaction_dict["createdBy"],
+ topic=t['id'],
+ auto=True
)
session.add(topic_following)
def migrate_ratings(session, entry, reaction_dict):
for comment_rating_old in entry.get("ratings", []):
- rater = session.query(User).filter(User.oid == comment_rating_old["createdBy"]).first()
+ rater = (
+ session.query(User)
+ .filter(User.oid == comment_rating_old["createdBy"])
+ .first()
+ )
re_reaction_dict = {
"shout": reaction_dict["shout"],
"replyTo": reaction_dict["id"],
- "kind": ReactionKind.LIKE if comment_rating_old["value"] > 0 else ReactionKind.DISLIKE,
+ "kind": ReactionKind.LIKE
+ if comment_rating_old["value"] > 0
+ else ReactionKind.DISLIKE,
"createdBy": rater.id if rater else 1,
}
cts = comment_rating_old.get("createdAt")
@@ -53,15 +68,18 @@ def migrate_ratings(session, entry, reaction_dict):
try:
# creating reaction from old rating
rr = Reaction.create(**re_reaction_dict)
- following2 = (
- session.query(ShoutReactionsFollower)
- .where(ShoutReactionsFollower.follower == re_reaction_dict["createdBy"])
- .filter(ShoutReactionsFollower.shout == rr.shout)
- .first()
- )
+ following2 = session.query(
+ ShoutReactionsFollower
+ ).where(
+ ShoutReactionsFollower.follower == re_reaction_dict['createdBy']
+ ).filter(
+ ShoutReactionsFollower.shout == rr.shout
+ ).first()
if not following2:
following2 = ShoutReactionsFollower.create(
- follower=re_reaction_dict["createdBy"], shout=rr.shout, auto=True
+ follower=re_reaction_dict['createdBy'],
+ shout=rr.shout,
+ auto=True
)
session.add(following2)
session.add(rr)
@@ -132,7 +150,9 @@ async def migrate(entry, storage):
else:
stage = "author and old id found"
try:
- shout = session.query(Shout).where(Shout.slug == old_shout["slug"]).one()
+ shout = session.query(
+ Shout
+ ).where(Shout.slug == old_shout["slug"]).one()
if shout:
reaction_dict["shout"] = shout.id
reaction_dict["createdBy"] = author.id if author else 1
@@ -158,9 +178,9 @@ async def migrate(entry, storage):
def migrate_2stage(old_comment, idmap):
- if old_comment.get("body"):
- new_id = idmap.get(old_comment.get("oid"))
- new_id = idmap.get(old_comment.get("_id"))
+ if old_comment.get('body'):
+ new_id = idmap.get(old_comment.get('oid'))
+ new_id = idmap.get(old_comment.get('_id'))
if new_id:
new_replyto_id = None
old_replyto_id = old_comment.get("replyTo")
@@ -170,20 +190,17 @@ def migrate_2stage(old_comment, idmap):
comment = session.query(Reaction).where(Reaction.id == new_id).first()
try:
if new_replyto_id:
- new_reply = (
- session.query(Reaction).where(Reaction.id == new_replyto_id).first()
- )
+ new_reply = session.query(Reaction).where(Reaction.id == new_replyto_id).first()
if not new_reply:
print(new_replyto_id)
raise Exception("cannot find reply by id!")
comment.replyTo = new_reply.id
session.add(comment)
- srf = (
- session.query(ShoutReactionsFollower)
- .where(ShoutReactionsFollower.shout == comment.shout)
- .filter(ShoutReactionsFollower.follower == comment.createdBy)
- .first()
- )
+ srf = session.query(ShoutReactionsFollower).where(
+ ShoutReactionsFollower.shout == comment.shout
+ ).filter(
+ ShoutReactionsFollower.follower == comment.createdBy
+ ).first()
if not srf:
srf = ShoutReactionsFollower.create(
shout=comment.shout, follower=comment.createdBy, auto=True
diff --git a/migration/tables/content_items.py b/migration/tables/content_items.py
index 053a8a97..a2297d98 100644
--- a/migration/tables/content_items.py
+++ b/migration/tables/content_items.py
@@ -1,16 +1,15 @@
-from base.orm import local_session
from datetime import datetime, timezone
+import json
from dateutil.parser import parse as date_parse
-from migration.extract import extract_html, extract_media
-from orm.reaction import Reaction, ReactionKind
-from orm.shout import Shout, ShoutReactionsFollower, ShoutTopic
-from orm.topic import Topic, TopicFollower
-from orm.user import User
-from services.stat.viewed import ViewedStorage
from sqlalchemy.exc import IntegrityError
from transliterate import translit
-
-import json
+from base.orm import local_session
+from migration.extract import extract_html, extract_media
+from orm.reaction import Reaction, ReactionKind
+from orm.shout import Shout, ShoutTopic, ShoutReactionsFollower
+from orm.user import User
+from orm.topic import TopicFollower, Topic
+from services.stat.viewed import ViewedStorage
import re
OLD_DATE = "2016-03-05 22:22:00.350000"
@@ -34,7 +33,7 @@ def get_shout_slug(entry):
slug = friend.get("slug", "")
if slug:
break
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
return slug
@@ -42,27 +41,27 @@ def create_author_from_app(app):
user = None
userdata = None
# check if email is used
- if app["email"]:
+ if app['email']:
with local_session() as session:
- user = session.query(User).where(User.email == app["email"]).first()
+ user = session.query(User).where(User.email == app['email']).first()
if not user:
# print('[migration] app %r' % app)
- name = app.get("name")
+ name = app.get('name')
if name:
slug = translit(name, "ru", reversed=True).lower()
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
- print("[migration] created slug %s" % slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ print('[migration] created slug %s' % slug)
# check if slug is used
if slug:
user = session.query(User).where(User.slug == slug).first()
# get slug from email
if user:
- slug = app["email"].split("@")[0]
+ slug = app['email'].split('@')[0]
user = session.query(User).where(User.slug == slug).first()
# one more try
if user:
- slug += "-author"
+ slug += '-author'
user = session.query(User).where(User.slug == slug).first()
# create user with application data
@@ -80,7 +79,7 @@ def create_author_from_app(app):
user = User.create(**userdata)
session.add(user)
session.commit()
- userdata["id"] = user.id
+ userdata['id'] = user.id
userdata = user.dict()
return userdata
@@ -92,12 +91,11 @@ async def create_shout(shout_dict):
s = Shout.create(**shout_dict)
author = s.authors[0]
with local_session() as session:
- srf = (
- session.query(ShoutReactionsFollower)
- .where(ShoutReactionsFollower.shout == s.id)
- .filter(ShoutReactionsFollower.follower == author.id)
- .first()
- )
+ srf = session.query(ShoutReactionsFollower).where(
+ ShoutReactionsFollower.shout == s.id
+ ).filter(
+ ShoutReactionsFollower.follower == author.id
+ ).first()
if not srf:
srf = ShoutReactionsFollower.create(shout=s.id, follower=author.id, auto=True)
session.add(srf)
@@ -118,14 +116,14 @@ async def get_user(entry, storage):
elif user_oid:
userdata = storage["users"]["by_oid"].get(user_oid)
if not userdata:
- print("no userdata by oid, anonymous")
+ print('no userdata by oid, anonymous')
userdata = anondict
print(app)
# cleanup slug
if userdata:
slug = userdata.get("slug", "")
if slug:
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
userdata["slug"] = slug
else:
userdata = anondict
@@ -139,14 +137,11 @@ async def migrate(entry, storage):
r = {
"layout": type2layout[entry["type"]],
"title": entry["title"],
- "authors": [
- author,
- ],
+ "authors": [author, ],
"slug": get_shout_slug(entry),
"cover": (
- "https://images.discours.io/unsafe/" + entry["thumborId"]
- if entry.get("thumborId")
- else entry.get("image", {}).get("url")
+ "https://images.discours.io/unsafe/" +
+ entry["thumborId"] if entry.get("thumborId") else entry.get("image", {}).get("url")
),
"visibility": "public" if entry.get("published") else "community",
"publishedAt": date_parse(entry.get("publishedAt")) if entry.get("published") else None,
@@ -155,11 +150,11 @@ async def migrate(entry, storage):
"updatedAt": date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts,
"createdBy": author.id,
"topics": await add_topics_follower(entry, storage, author),
- "body": extract_html(entry, cleanup=True),
+ "body": extract_html(entry, cleanup=True)
}
# main topic patch
- r["mainTopic"] = r["topics"][0]
+ r['mainTopic'] = r['topics'][0]
# published author auto-confirm
if entry.get("published"):
@@ -182,16 +177,14 @@ async def migrate(entry, storage):
shout_dict["oid"] = entry.get("_id", "")
shout = await create_shout(shout_dict)
except IntegrityError as e:
- print("[migration] create_shout integrity error", e)
+ print('[migration] create_shout integrity error', e)
shout = await resolve_create_shout(shout_dict)
except Exception as e:
raise Exception(e)
# udpate data
shout_dict = shout.dict()
- shout_dict["authors"] = [
- author.dict(),
- ]
+ shout_dict["authors"] = [author.dict(), ]
# shout topics aftermath
shout_dict["topics"] = await topics_aftermath(r, storage)
@@ -200,9 +193,7 @@ async def migrate(entry, storage):
await content_ratings_to_reactions(entry, shout_dict["slug"])
# shout views
- await ViewedStorage.increment(
- shout_dict["slug"], amount=entry.get("views", 1), viewer="old-discours"
- )
+ await ViewedStorage.increment(shout_dict["slug"], amount=entry.get("views", 1), viewer='old-discours')
# del shout_dict['ratings']
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
@@ -214,9 +205,7 @@ async def add_topics_follower(entry, storage, user):
topics = set([])
category = entry.get("category")
topics_by_oid = storage["topics"]["by_oid"]
- oids = [
- category,
- ] + entry.get("tags", [])
+ oids = [category, ] + entry.get("tags", [])
for toid in oids:
tslug = topics_by_oid.get(toid, {}).get("slug")
if tslug:
@@ -228,18 +217,23 @@ async def add_topics_follower(entry, storage, user):
try:
tpc = session.query(Topic).where(Topic.slug == tpcslug).first()
if tpc:
- tf = (
- session.query(TopicFollower)
- .where(TopicFollower.follower == user.id)
- .filter(TopicFollower.topic == tpc.id)
- .first()
- )
+ tf = session.query(
+ TopicFollower
+ ).where(
+ TopicFollower.follower == user.id
+ ).filter(
+ TopicFollower.topic == tpc.id
+ ).first()
if not tf:
- tf = TopicFollower.create(topic=tpc.id, follower=user.id, auto=True)
+ tf = TopicFollower.create(
+ topic=tpc.id,
+ follower=user.id,
+ auto=True
+ )
session.add(tf)
session.commit()
except IntegrityError:
- print("[migration.shout] hidden by topic " + tpc.slug)
+ print('[migration.shout] hidden by topic ' + tpc.slug)
# main topic
maintopic = storage["replacements"].get(topics_by_oid.get(category, {}).get("slug"))
if maintopic in ttt:
@@ -260,7 +254,7 @@ async def process_user(userdata, storage, oid):
if not user:
try:
slug = userdata["slug"].lower().strip()
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
userdata["slug"] = slug
user = User.create(**userdata)
session.add(user)
@@ -288,9 +282,9 @@ async def resolve_create_shout(shout_dict):
s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
bump = False
if s:
- if s.createdAt != shout_dict["createdAt"]:
+ if s.createdAt != shout_dict['createdAt']:
# create new with different slug
- shout_dict["slug"] += "-" + shout_dict["layout"]
+ shout_dict["slug"] += '-' + shout_dict["layout"]
try:
await create_shout(shout_dict)
except IntegrityError as e:
@@ -301,7 +295,10 @@ async def resolve_create_shout(shout_dict):
for key in shout_dict:
if key in s.__dict__:
if s.__dict__[key] != shout_dict[key]:
- print("[migration] shout already exists, but differs in %s" % key)
+ print(
+ "[migration] shout already exists, but differs in %s"
+ % key
+ )
bump = True
else:
print("[migration] shout already exists, but lacks %s" % key)
@@ -347,7 +344,9 @@ async def topics_aftermath(entry, storage):
)
if not shout_topic_new:
try:
- ShoutTopic.create(**{"shout": shout.id, "topic": new_topic.id})
+ ShoutTopic.create(
+ **{"shout": shout.id, "topic": new_topic.id}
+ )
except Exception:
print("[migration] shout topic error: " + newslug)
session.commit()
@@ -364,7 +363,9 @@ async def content_ratings_to_reactions(entry, slug):
with local_session() as session:
for content_rating in entry.get("ratings", []):
rater = (
- session.query(User).filter(User.oid == content_rating["createdBy"]).first()
+ session.query(User)
+ .filter(User.oid == content_rating["createdBy"])
+ .first()
) or User.default_user
shout = session.query(Shout).where(Shout.slug == slug).first()
cts = content_rating.get("createdAt")
@@ -374,7 +375,7 @@ async def content_ratings_to_reactions(entry, slug):
if content_rating["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": rater.id,
- "shout": shout.id,
+ "shout": shout.id
}
reaction = (
session.query(Reaction)
diff --git a/migration/tables/remarks.py b/migration/tables/remarks.py
index e133050f..026b95c6 100644
--- a/migration/tables/remarks.py
+++ b/migration/tables/remarks.py
@@ -5,26 +5,34 @@ from orm.reaction import Reaction, ReactionKind
def migrate(entry, storage):
- post_oid = entry["contentItem"]
+ post_oid = entry['contentItem']
print(post_oid)
- shout_dict = storage["shouts"]["by_oid"].get(post_oid)
+ shout_dict = storage['shouts']['by_oid'].get(post_oid)
if shout_dict:
- print(shout_dict["body"])
+ print(shout_dict['body'])
remark = {
- "shout": shout_dict["id"],
- "body": extract_md(html2text(entry["body"]), shout_dict),
- "kind": ReactionKind.REMARK,
+ "shout": shout_dict['id'],
+ "body": extract_md(
+ html2text(entry['body']),
+ shout_dict
+ ),
+ "kind": ReactionKind.REMARK
}
- if entry.get("textBefore"):
- remark["range"] = (
- str(shout_dict["body"].index(entry["textBefore"] or ""))
- + ":"
- + str(
- shout_dict["body"].index(entry["textAfter"] or "")
- + len(entry["textAfter"] or "")
+ if entry.get('textBefore'):
+ remark['range'] = str(
+ shout_dict['body']
+ .index(
+ entry['textBefore'] or ''
+ )
+ ) + ':' + str(
+ shout_dict['body']
+ .index(
+ entry['textAfter'] or ''
+ ) + len(
+ entry['textAfter'] or ''
+ )
)
- )
with local_session() as session:
rmrk = Reaction.create(**remark)
diff --git a/migration/tables/topics.py b/migration/tables/topics.py
index ae9ddbda..17804376 100644
--- a/migration/tables/topics.py
+++ b/migration/tables/topics.py
@@ -10,7 +10,7 @@ def migrate(entry):
"slug": entry["slug"],
"oid": entry["_id"],
"title": entry["title"].replace(" ", " "),
- "body": extract_md(html2text(body_orig)),
+ "body": extract_md(html2text(body_orig))
}
with local_session() as session:
diff --git a/migration/tables/users.py b/migration/tables/users.py
index 40c80f21..3ccf9029 100644
--- a/migration/tables/users.py
+++ b/migration/tables/users.py
@@ -1,10 +1,11 @@
-from base.orm import local_session
+import re
+
from bs4 import BeautifulSoup
from dateutil.parser import parse
-from orm.user import AuthorFollower, User, UserRating
from sqlalchemy.exc import IntegrityError
-import re
+from base.orm import local_session
+from orm.user import AuthorFollower, User, UserRating
def migrate(entry):
@@ -22,7 +23,7 @@ def migrate(entry):
"muted": False, # amnesty
"links": [],
"name": "anonymous",
- "password": entry["services"]["password"].get("bcrypt"),
+ "password": entry["services"]["password"].get("bcrypt")
}
if "updatedAt" in entry:
@@ -32,13 +33,9 @@ def migrate(entry):
if entry.get("profile"):
# slug
slug = entry["profile"].get("path").lower()
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug).strip()
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug).strip()
user_dict["slug"] = slug
- bio = (
- (entry.get("profile", {"bio": ""}).get("bio") or "")
- .replace(r"\(", "(")
- .replace(r"\)", ")")
- )
+ bio = (entry.get("profile", {"bio": ""}).get("bio") or "").replace('\(', '(').replace('\)', ')')
bio_text = BeautifulSoup(bio, features="lxml").text
if len(bio_text) > 120:
@@ -49,7 +46,8 @@ def migrate(entry):
# userpic
try:
user_dict["userpic"] = (
- "https://images.discours.io/unsafe/" + entry["profile"]["thumborId"]
+ "https://images.discours.io/unsafe/"
+ + entry["profile"]["thumborId"]
)
except KeyError:
try:
@@ -64,7 +62,11 @@ def migrate(entry):
name = (name + " " + ln) if ln else name
if not name:
name = slug if slug else "anonymous"
- name = entry["profile"]["path"].lower().strip().replace(" ", "-") if len(name) < 2 else name
+ name = (
+ entry["profile"]["path"].lower().strip().replace(" ", "-")
+ if len(name) < 2
+ else name
+ )
user_dict["name"] = name
# links
@@ -93,7 +95,9 @@ def migrate(entry):
except IntegrityError:
print("[migration] cannot create user " + user_dict["slug"])
with local_session() as session:
- old_user = session.query(User).filter(User.slug == user_dict["slug"]).first()
+ old_user = (
+ session.query(User).filter(User.slug == user_dict["slug"]).first()
+ )
old_user.oid = oid
old_user.password = user_dict["password"]
session.commit()
@@ -110,7 +114,7 @@ def post_migrate():
"slug": "old-discours",
"username": "old-discours",
"email": "old@discours.io",
- "name": "Просмотры на старой версии сайта",
+ "name": "Просмотры на старой версии сайта"
}
with local_session() as session:
@@ -143,8 +147,12 @@ def migrate_2stage(entry, id_map):
}
user_rating = UserRating.create(**user_rating_dict)
- if user_rating_dict["value"] > 0:
- af = AuthorFollower.create(author=user.id, follower=rater.id, auto=True)
+ if user_rating_dict['value'] > 0:
+ af = AuthorFollower.create(
+ author=user.id,
+ follower=rater.id,
+ auto=True
+ )
session.add(af)
session.add(user_rating)
session.commit()
diff --git a/orm/__init__.py b/orm/__init__.py
index 9f66f85c..53b13951 100644
--- a/orm/__init__.py
+++ b/orm/__init__.py
@@ -1,7 +1,7 @@
from base.orm import Base, engine
from orm.community import Community
from orm.notification import Notification
-from orm.rbac import Operation, Permission, Resource, Role
+from orm.rbac import Operation, Resource, Permission, Role
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic, TopicFollower
@@ -32,5 +32,5 @@ __all__ = [
"Notification",
"Reaction",
"UserRating",
- "init_tables",
+ "init_tables"
]
diff --git a/orm/collection.py b/orm/collection.py
index 1c432727..c9975b62 100644
--- a/orm/collection.py
+++ b/orm/collection.py
@@ -1,7 +1,9 @@
-from base.orm import Base
from datetime import datetime
+
from sqlalchemy import Column, DateTime, ForeignKey, String
+from base.orm import Base
+
class ShoutCollection(Base):
__tablename__ = "shout_collection"
diff --git a/orm/community.py b/orm/community.py
index c31732a0..b55b857f 100644
--- a/orm/community.py
+++ b/orm/community.py
@@ -1,6 +1,7 @@
-from base.orm import Base, local_session
from datetime import datetime
-from sqlalchemy import Column, DateTime, ForeignKey, String
+
+from sqlalchemy import Column, String, ForeignKey, DateTime
+from base.orm import Base, local_session
class CommunityFollower(Base):
@@ -9,7 +10,9 @@ class CommunityFollower(Base):
id = None # type: ignore
follower = Column(ForeignKey("user.id"), primary_key=True)
community = Column(ForeignKey("community.id"), primary_key=True)
- joinedAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ joinedAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
# role = Column(ForeignKey(Role.id), nullable=False, comment="Role for member")
@@ -20,15 +23,19 @@ class Community(Base):
slug = Column(String, nullable=False, unique=True, comment="Slug")
desc = Column(String, nullable=False, default="")
pic = Column(String, nullable=False, default="")
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
@staticmethod
def init_table():
with local_session() as session:
- d = session.query(Community).filter(Community.slug == "discours").first()
+ d = (
+ session.query(Community).filter(Community.slug == "discours").first()
+ )
if not d:
d = Community.create(name="Дискурс", slug="discours")
session.add(d)
session.commit()
Community.default_community = d
- print("[orm] default community id: %s" % d.id)
+ print('[orm] default community id: %s' % d.id)
diff --git a/orm/notification.py b/orm/notification.py
index 2fdc9d5d..25f4e4f3 100644
--- a/orm/notification.py
+++ b/orm/notification.py
@@ -1,9 +1,10 @@
-from base.orm import Base
from datetime import datetime
-from enum import Enum as Enumeration
-from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer
+from sqlalchemy import Column, Enum, ForeignKey, DateTime, Boolean, Integer
from sqlalchemy.dialects.postgresql import JSONB
+from base.orm import Base
+from enum import Enum as Enumeration
+
class NotificationType(Enumeration):
NEW_COMMENT = 1
diff --git a/orm/rbac.py b/orm/rbac.py
index bb7eb34b..29ade72e 100644
--- a/orm/rbac.py
+++ b/orm/rbac.py
@@ -1,8 +1,9 @@
-from base.orm import Base, local_session, REGISTRY
-from sqlalchemy import Column, ForeignKey, String, TypeDecorator, UniqueConstraint
+import warnings
+
+from sqlalchemy import String, Column, ForeignKey, UniqueConstraint, TypeDecorator
from sqlalchemy.orm import relationship
-import warnings
+from base.orm import Base, REGISTRY, engine, local_session
# Role Based Access Control #
@@ -120,23 +121,16 @@ class Operation(Base):
class Resource(Base):
__tablename__ = "resource"
- resourceClass = Column(String, nullable=False, unique=True, comment="Resource class")
+ resourceClass = Column(
+ String, nullable=False, unique=True, comment="Resource class"
+ )
name = Column(String, nullable=False, unique=True, comment="Resource name")
# TODO: community = Column(ForeignKey())
@staticmethod
def init_table():
with local_session() as session:
- for res in [
- "shout",
- "topic",
- "reaction",
- "chat",
- "message",
- "invite",
- "community",
- "user",
- ]:
+ for res in ["shout", "topic", "reaction", "chat", "message", "invite", "community", "user"]:
r = session.query(Resource).filter(Resource.name == res).first()
if not r:
r = Resource.create(name=res, resourceClass=res)
@@ -151,7 +145,9 @@ class Permission(Base):
{"extend_existing": True},
)
- role = Column(ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role")
+ role = Column(
+ ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role"
+ )
operation = Column(
ForeignKey("operation.id", ondelete="CASCADE"),
nullable=False,
@@ -164,14 +160,14 @@ class Permission(Base):
)
-# if __name__ == "__main__":
-# Base.metadata.create_all(engine)
-# ops = [
-# Permission(role=1, operation=1, resource=1),
-# Permission(role=1, operation=2, resource=1),
-# Permission(role=1, operation=3, resource=1),
-# Permission(role=1, operation=4, resource=1),
-# Permission(role=2, operation=4, resource=1),
-# ]
-# global_session.add_all(ops)
-# global_session.commit()
+if __name__ == "__main__":
+ Base.metadata.create_all(engine)
+ ops = [
+ Permission(role=1, operation=1, resource=1),
+ Permission(role=1, operation=2, resource=1),
+ Permission(role=1, operation=3, resource=1),
+ Permission(role=1, operation=4, resource=1),
+ Permission(role=2, operation=4, resource=1),
+ ]
+ global_session.add_all(ops)
+ global_session.commit()
diff --git a/orm/reaction.py b/orm/reaction.py
index 89fed9eb..1c129e23 100644
--- a/orm/reaction.py
+++ b/orm/reaction.py
@@ -1,8 +1,10 @@
-from base.orm import Base
from datetime import datetime
from enum import Enum as Enumeration
+
from sqlalchemy import Column, DateTime, Enum, ForeignKey, String
+from base.orm import Base
+
class ReactionKind(Enumeration):
AGREE = 1 # +1
@@ -25,14 +27,18 @@ class ReactionKind(Enumeration):
class Reaction(Base):
__tablename__ = "reaction"
body = Column(String, nullable=True, comment="Reaction Body")
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
createdBy = Column(ForeignKey("user.id"), nullable=False, index=True, comment="Sender")
updatedAt = Column(DateTime, nullable=True, comment="Updated at")
updatedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Last Editor")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
deletedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Deleted by")
shout = Column(ForeignKey("shout.id"), nullable=False, index=True)
- replyTo = Column(ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID")
+ replyTo = Column(
+ ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID"
+ )
range = Column(String, nullable=True, comment="Range in format
:")
kind = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
oid = Column(String, nullable=True, comment="Old ID")
diff --git a/orm/shout.py b/orm/shout.py
index 7a77b66c..22381d4c 100644
--- a/orm/shout.py
+++ b/orm/shout.py
@@ -1,10 +1,12 @@
-from base.orm import Base, local_session
from datetime import datetime
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, JSON
+from sqlalchemy.orm import column_property, relationship
+
+from base.orm import Base, local_session
from orm.reaction import Reaction
from orm.topic import Topic
from orm.user import User
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, JSON, String
-from sqlalchemy.orm import column_property, relationship
class ShoutTopic(Base):
@@ -22,7 +24,9 @@ class ShoutReactionsFollower(Base):
follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
auto = Column(Boolean, nullable=False, default=False)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
deletedAt = Column(DateTime, nullable=True)
@@ -68,7 +72,7 @@ class Shout(Base):
# TODO: these field should be used or modified
community = Column(ForeignKey("community.id"), default=1)
- lang = Column(String, nullable=False, default="ru", comment="Language")
+ lang = Column(String, nullable=False, default='ru', comment="Language")
mainTopic = Column(ForeignKey("topic.slug"), nullable=True)
visibility = Column(String, nullable=True) # owner authors community public
versionOf = Column(ForeignKey("shout.id"), nullable=True)
@@ -83,7 +87,7 @@ class Shout(Base):
"slug": "genesis-block",
"body": "",
"title": "Ничего",
- "lang": "ru",
+ "lang": "ru"
}
s = Shout.create(**entry)
session.add(s)
diff --git a/orm/topic.py b/orm/topic.py
index 6da93732..a37dc69a 100644
--- a/orm/topic.py
+++ b/orm/topic.py
@@ -1,7 +1,9 @@
-from base.orm import Base
from datetime import datetime
+
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String
+from base.orm import Base
+
class TopicFollower(Base):
__tablename__ = "topic_followers"
@@ -9,7 +11,9 @@ class TopicFollower(Base):
id = None # type: ignore
follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
auto = Column(Boolean, nullable=False, default=False)
@@ -20,5 +24,7 @@ class Topic(Base):
title = Column(String, nullable=False, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
- community = Column(ForeignKey("community.id"), default=1, comment="Community")
+ community = Column(
+ ForeignKey("community.id"), default=1, comment="Community"
+ )
oid = Column(String, nullable=True, comment="Old ID")
diff --git a/orm/user.py b/orm/user.py
index d76c4627..5aeab90e 100644
--- a/orm/user.py
+++ b/orm/user.py
@@ -1,10 +1,10 @@
-from base.orm import Base, local_session
from datetime import datetime
-from orm.rbac import Role
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer
+
from sqlalchemy import JSON as JSONType
-from sqlalchemy import String
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
+from base.orm import Base, local_session
+from orm.rbac import Role
class UserRating(Base):
@@ -34,7 +34,9 @@ class AuthorFollower(Base):
id = None # type: ignore
follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
author = Column(ForeignKey("user.id"), primary_key=True, index=True)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
auto = Column(Boolean, nullable=False, default=False)
@@ -52,8 +54,12 @@ class User(Base):
slug = Column(String, unique=True, comment="User's slug")
muted = Column(Boolean, default=False)
emailConfirmed = Column(Boolean, default=False)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
- lastSeen = Column(DateTime, nullable=False, default=datetime.now, comment="Was online at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
+ lastSeen = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Was online at"
+ )
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
links = Column(JSONType, nullable=True, comment="Links")
oauth = Column(String, nullable=True)
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 31fbe456..d221f3b0 100755
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -2,5 +2,3 @@ isort
brunette
flake8
mypy
-pre-commit
-black
diff --git a/requirements.txt b/requirements.txt
index a919e623..edbf46ff 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -18,12 +18,15 @@ transliterate~=1.10.2
requests~=2.28.1
bcrypt>=4.0.0
bson~=0.5.10
+flake8
DateTime~=4.7
asyncio~=3.4.3
python-dateutil~=2.8.2
beautifulsoup4~=4.11.1
lxml
sentry-sdk>=1.14.0
+# sse_starlette
+graphql-ws
nltk~=3.8.1
pymystem3~=0.2.0
transformers~=4.28.1
diff --git a/resetdb.sh b/resetdb.sh
index 40ba2e37..39b3b9b2 100755
--- a/resetdb.sh
+++ b/resetdb.sh
@@ -53,3 +53,4 @@ echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'
+
diff --git a/resolvers/__init__.py b/resolvers/__init__.py
new file mode 100644
index 00000000..5d753ac4
--- /dev/null
+++ b/resolvers/__init__.py
@@ -0,0 +1,67 @@
+from resolvers.auth import (
+ login,
+ sign_out,
+ is_email_used,
+ register_by_email,
+ confirm_email,
+ auth_send_link,
+ get_current_user,
+)
+
+from resolvers.create.migrate import markdown_body
+from resolvers.create.editor import create_shout, delete_shout, update_shout
+
+from resolvers.zine.profile import (
+ load_authors_by,
+ rate_user,
+ update_profile,
+ get_authors_all
+)
+
+from resolvers.zine.reactions import (
+ create_reaction,
+ delete_reaction,
+ update_reaction,
+ reactions_unfollow,
+ reactions_follow,
+ load_reactions_by
+)
+from resolvers.zine.topics import (
+ topic_follow,
+ topic_unfollow,
+ topics_by_author,
+ topics_by_community,
+ topics_all,
+ get_topic
+)
+
+from resolvers.zine.following import (
+ follow,
+ unfollow
+)
+
+from resolvers.zine.load import (
+ load_shout,
+ load_shouts_by
+)
+
+from resolvers.inbox.chats import (
+ create_chat,
+ delete_chat,
+ update_chat
+
+)
+from resolvers.inbox.messages import (
+ create_message,
+ delete_message,
+ update_message,
+ mark_as_read
+)
+from resolvers.inbox.load import (
+ load_chats,
+ load_messages_by,
+ load_recipients
+)
+from resolvers.inbox.search import search_recipients
+
+from resolvers.notifications import load_notifications
diff --git a/resolvers/auth.py b/resolvers/auth.py
index 3ba15d9d..17369b7a 100644
--- a/resolvers/auth.py
+++ b/resolvers/auth.py
@@ -1,29 +1,24 @@
# -*- coding: utf-8 -*-
+from datetime import datetime, timezone
+from urllib.parse import quote_plus
+
+from graphql.type import GraphQLResolveInfo
+from starlette.responses import RedirectResponse
+from transliterate import translit
+import re
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
-from base.exceptions import (
- BaseHttpException,
- InvalidPassword,
- InvalidToken,
- ObjectNotExist,
- Unauthorized,
-)
+from base.exceptions import (BaseHttpException, InvalidPassword, InvalidToken,
+ ObjectNotExist, Unauthorized)
from base.orm import local_session
from base.resolvers import mutation, query
-from datetime import datetime, timezone
-from graphql.type import GraphQLResolveInfo
from orm import Role, User
-from settings import FRONTEND_URL, SESSION_TOKEN_HEADER
-from starlette.responses import RedirectResponse
-from transliterate import translit
-from urllib.parse import quote_plus
-
-import re
+from settings import SESSION_TOKEN_HEADER, FRONTEND_URL
@mutation.field("getSession")
@@ -37,14 +32,17 @@ async def get_current_user(_, info):
user.lastSeen = datetime.now(tz=timezone.utc)
session.commit()
- return {"token": token, "user": user}
+ return {
+ "token": token,
+ "user": user
+ }
@mutation.field("confirmEmail")
async def confirm_email(_, info, token):
"""confirm owning email address"""
try:
- print("[resolvers.auth] confirm email by token")
+ print('[resolvers.auth] confirm email by token')
payload = JWTCodec.decode(token)
user_id = payload.user_id
await TokenStorage.get(f"{user_id}-{payload.username}-{token}")
@@ -55,7 +53,10 @@ async def confirm_email(_, info, token):
user.lastSeen = datetime.now(tz=timezone.utc)
session.add(user)
session.commit()
- return {"token": session_token, "user": user}
+ return {
+ "token": session_token,
+ "user": user
+ }
except InvalidToken as e:
raise InvalidToken(e.message)
except Exception as e:
@@ -67,9 +68,9 @@ async def confirm_email_handler(request):
token = request.path_params["token"] # one time
request.session["token"] = token
res = await confirm_email(None, {}, token)
- print("[resolvers.auth] confirm_email request: %r" % request)
+ print('[resolvers.auth] confirm_email request: %r' % request)
if "error" in res:
- raise BaseHttpException(res["error"])
+ raise BaseHttpException(res['error'])
else:
response = RedirectResponse(url=FRONTEND_URL)
response.set_cookie("token", res["token"]) # session token
@@ -86,22 +87,22 @@ def create_user(user_dict):
def generate_unique_slug(src):
- print("[resolvers.auth] generating slug from: " + src)
+ print('[resolvers.auth] generating slug from: ' + src)
slug = translit(src, "ru", reversed=True).replace(".", "-").lower()
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
if slug != src:
- print("[resolvers.auth] translited name: " + slug)
+ print('[resolvers.auth] translited name: ' + slug)
c = 1
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
while user:
user = session.query(User).where(User.slug == slug).first()
- slug = slug + "-" + str(c)
+ slug = slug + '-' + str(c)
c += 1
if not user:
unique_slug = slug
- print("[resolvers.auth] " + unique_slug)
- return quote_plus(unique_slug.replace("'", "")).replace("+", "-")
+ print('[resolvers.auth] ' + unique_slug)
+ return quote_plus(unique_slug.replace('\'', '')).replace('+', '-')
@mutation.field("registerUser")
@@ -116,12 +117,12 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str
slug = generate_unique_slug(name)
user = session.query(User).where(User.slug == slug).first()
if user:
- slug = generate_unique_slug(email.split("@")[0])
+ slug = generate_unique_slug(email.split('@')[0])
user_dict = {
"email": email,
"username": email, # will be used to store phone number or some messenger network id
"name": name,
- "slug": slug,
+ "slug": slug
}
if password:
user_dict["password"] = Password.encode(password)
@@ -171,7 +172,10 @@ async def login(_, info, email: str, password: str = "", lang: str = "ru"):
user = Identity.password(orm_user, password)
session_token = await TokenStorage.create_session(user)
print(f"[auth] user {email} authorized")
- return {"token": session_token, "user": user}
+ return {
+ "token": session_token,
+ "user": user
+ }
except InvalidPassword:
print(f"[auth] {email}: invalid password")
raise InvalidPassword("invalid password") # contains webserver status
diff --git a/resolvers/create/editor.py b/resolvers/create/editor.py
index 6ec690f7..c81ff404 100644
--- a/resolvers/create/editor.py
+++ b/resolvers/create/editor.py
@@ -1,13 +1,15 @@
+from datetime import datetime, timezone
+
+from sqlalchemy import and_
+from sqlalchemy.orm import joinedload
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation
-from datetime import datetime, timezone
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
-from sqlalchemy import and_
-from sqlalchemy.orm import joinedload
@mutation.field("createShout")
@@ -16,23 +18,21 @@ async def create_shout(_, info, inp):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
- topics = session.query(Topic).filter(Topic.slug.in_(inp.get("topics", []))).all()
+ topics = session.query(Topic).filter(Topic.slug.in_(inp.get('topics', []))).all()
- new_shout = Shout.create(
- **{
- "title": inp.get("title"),
- "subtitle": inp.get("subtitle"),
- "lead": inp.get("lead"),
- "description": inp.get("description"),
- "body": inp.get("body", ""),
- "layout": inp.get("layout"),
- "authors": inp.get("authors", []),
- "slug": inp.get("slug"),
- "mainTopic": inp.get("mainTopic"),
- "visibility": "owner",
- "createdBy": auth.user_id,
- }
- )
+ new_shout = Shout.create(**{
+ "title": inp.get("title"),
+ "subtitle": inp.get('subtitle'),
+ "lead": inp.get('lead'),
+ "description": inp.get('description'),
+ "body": inp.get("body", ''),
+ "layout": inp.get("layout"),
+ "authors": inp.get("authors", []),
+ "slug": inp.get("slug"),
+ "mainTopic": inp.get("mainTopic"),
+ "visibility": "owner",
+ "createdBy": auth.user_id
+ })
for topic in topics:
t = ShoutTopic.create(topic=topic.id, shout=new_shout.id)
@@ -64,15 +64,10 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
- shout = (
- session.query(Shout)
- .options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- )
- .filter(Shout.id == shout_id)
- .first()
- )
+ shout = session.query(Shout).options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ ).filter(Shout.id == shout_id).first()
if not shout:
return {"error": "shout not found"}
@@ -99,36 +94,24 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
session.commit()
for new_topic_to_link in new_topics_to_link:
- created_unlinked_topic = ShoutTopic.create(
- shout=shout.id, topic=new_topic_to_link.id
- )
+ created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=new_topic_to_link.id)
session.add(created_unlinked_topic)
- existing_topics_input = [
- topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0
- ]
- existing_topic_to_link_ids = [
- existing_topic_input["id"]
- for existing_topic_input in existing_topics_input
- if existing_topic_input["id"] not in [topic.id for topic in shout.topics]
- ]
+ existing_topics_input = [topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0]
+ existing_topic_to_link_ids = [existing_topic_input["id"] for existing_topic_input in existing_topics_input
+ if existing_topic_input["id"] not in [topic.id for topic in shout.topics]]
for existing_topic_to_link_id in existing_topic_to_link_ids:
- created_unlinked_topic = ShoutTopic.create(
- shout=shout.id, topic=existing_topic_to_link_id
- )
+ created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=existing_topic_to_link_id)
session.add(created_unlinked_topic)
- topic_to_unlink_ids = [
- topic.id
- for topic in shout.topics
- if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]
- ]
+ topic_to_unlink_ids = [topic.id for topic in shout.topics
+ if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]]
shout_topics_to_remove = session.query(ShoutTopic).filter(
and_(
ShoutTopic.shout == shout.id,
- ShoutTopic.topic.in_(topic_to_unlink_ids),
+ ShoutTopic.topic.in_(topic_to_unlink_ids)
)
)
@@ -137,13 +120,13 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
shout_input["mainTopic"] = shout_input["mainTopic"]["slug"]
- if shout_input["mainTopic"] == "":
+ if shout_input["mainTopic"] == '':
del shout_input["mainTopic"]
shout.update(shout_input)
updated = True
- if publish and shout.visibility == "owner":
+ if publish and shout.visibility == 'owner':
shout.visibility = "community"
shout.publishedAt = datetime.now(tz=timezone.utc)
updated = True
diff --git a/resolvers/create/migrate.py b/resolvers/create/migrate.py
index 028808b1..f16341f0 100644
--- a/resolvers/create/migrate.py
+++ b/resolvers/create/migrate.py
@@ -1,10 +1,11 @@
-# from base.resolvers import query
-# from migration.extract import extract_md
-# from resolvers.auth import login_required
-#
-#
-# @login_required
-# @query.field("markdownBody")
-# def markdown_body(_, info, body: str):
-# body = extract_md(body)
-# return body
+
+from base.resolvers import query
+from resolvers.auth import login_required
+from migration.extract import extract_md
+
+
+@login_required
+@query.field("markdownBody")
+def markdown_body(_, info, body: str):
+ body = extract_md(body)
+ return body
diff --git a/resolvers/inbox/chats.py b/resolvers/inbox/chats.py
index 95a31f69..853defab 100644
--- a/resolvers/inbox/chats.py
+++ b/resolvers/inbox/chats.py
@@ -1,13 +1,13 @@
+import json
+import uuid
+from datetime import datetime, timezone
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
-from datetime import datetime, timezone
from validations.inbox import Chat
-import json
-import uuid
-
@mutation.field("updateChat")
@login_required
@@ -24,24 +24,27 @@ async def update_chat(_, info, chat_new: Chat):
chat_id = chat_new["id"]
chat = await redis.execute("GET", f"chats/{chat_id}")
if not chat:
- return {"error": "chat not exist"}
+ return {
+ "error": "chat not exist"
+ }
chat = dict(json.loads(chat))
# TODO
if auth.user_id in chat["admins"]:
- chat.update(
- {
- "title": chat_new.get("title", chat["title"]),
- "description": chat_new.get("description", chat["description"]),
- "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
- "admins": chat_new.get("admins", chat.get("admins") or []),
- "users": chat_new.get("users", chat["users"]),
- }
- )
+ chat.update({
+ "title": chat_new.get("title", chat["title"]),
+ "description": chat_new.get("description", chat["description"]),
+ "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
+ "admins": chat_new.get("admins", chat.get("admins") or []),
+ "users": chat_new.get("users", chat["users"])
+ })
await redis.execute("SET", f"chats/{chat.id}", json.dumps(chat))
await redis.execute("COMMIT")
- return {"error": None, "chat": chat}
+ return {
+ "error": None,
+ "chat": chat
+ }
@mutation.field("createChat")
@@ -49,7 +52,7 @@ async def update_chat(_, info, chat_new: Chat):
async def create_chat(_, info, title="", members=[]):
auth: AuthCredentials = info.context["request"].auth
chat = {}
- print("create_chat members: %r" % members)
+ print('create_chat members: %r' % members)
if auth.user_id not in members:
members.append(int(auth.user_id))
@@ -71,12 +74,15 @@ async def create_chat(_, info, title="", members=[]):
chat = await redis.execute("GET", f"chats/{c.decode('utf-8')}")
if chat:
chat = json.loads(chat)
- if chat["title"] == "":
- print("[inbox] createChat found old chat")
+ if chat['title'] == "":
+ print('[inbox] createChat found old chat')
print(chat)
break
if chat:
- return {"chat": chat, "error": "existed"}
+ return {
+ "chat": chat,
+ "error": "existed"
+ }
chat_id = str(uuid.uuid4())
chat = {
@@ -86,7 +92,7 @@ async def create_chat(_, info, title="", members=[]):
"createdBy": auth.user_id,
"createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
"updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
- "admins": members if (len(members) == 2 and title == "") else [],
+ "admins": members if (len(members) == 2 and title == "") else []
}
for m in members:
@@ -94,7 +100,10 @@ async def create_chat(_, info, title="", members=[]):
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0))
await redis.execute("COMMIT")
- return {"error": None, "chat": chat}
+ return {
+ "error": None,
+ "chat": chat
+ }
@mutation.field("deleteChat")
@@ -105,9 +114,11 @@ async def delete_chat(_, info, chat_id: str):
chat = await redis.execute("GET", f"/chats/{chat_id}")
if chat:
chat = dict(json.loads(chat))
- if auth.user_id in chat["admins"]:
+ if auth.user_id in chat['admins']:
await redis.execute("DEL", f"chats/{chat_id}")
await redis.execute("SREM", "chats_by_user/" + str(auth.user_id), chat_id)
await redis.execute("COMMIT")
else:
- return {"error": "chat not exist"}
+ return {
+ "error": "chat not exist"
+ }
diff --git a/resolvers/inbox/load.py b/resolvers/inbox/load.py
index 54ae75d5..a0d41721 100644
--- a/resolvers/inbox/load.py
+++ b/resolvers/inbox/load.py
@@ -1,26 +1,28 @@
-from .unread import get_unread_counter
+import json
+# from datetime import datetime, timedelta, timezone
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.orm import local_session
from base.redis import redis
+from base.orm import local_session
from base.resolvers import query
from orm.user import User
from resolvers.zine.profile import followed_authors
-
-import json
-
-# from datetime import datetime, timedelta, timezone
+from .unread import get_unread_counter
async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
- """load :limit messages for :chat_id with :offset"""
+ ''' load :limit messages for :chat_id with :offset '''
messages = []
message_ids = []
if ids:
message_ids += ids
try:
if limit:
- mids = await redis.lrange(f"chats/{chat_id}/message_ids", offset, offset + limit)
+ mids = await redis.lrange(f"chats/{chat_id}/message_ids",
+ offset,
+ offset + limit
+ )
mids = [mid.decode("utf-8") for mid in mids]
message_ids += mids
except Exception as e:
@@ -28,10 +30,10 @@ async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
if message_ids:
message_keys = [f"chats/{chat_id}/messages/{mid}" for mid in message_ids]
messages = await redis.mget(*message_keys)
- messages = [json.loads(msg.decode("utf-8")) for msg in messages]
+ messages = [json.loads(msg.decode('utf-8')) for msg in messages]
replies = []
for m in messages:
- rt = m.get("replyTo")
+ rt = m.get('replyTo')
if rt:
rt = int(rt)
if rt not in message_ids:
@@ -44,14 +46,14 @@ async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
@query.field("loadChats")
@login_required
async def load_chats(_, info, limit: int = 50, offset: int = 0):
- """load :limit chats of current user with :offset"""
+ """ load :limit chats of current user with :offset """
auth: AuthCredentials = info.context["request"].auth
cids = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
if cids:
- cids = list(cids)[offset : offset + limit]
+ cids = list(cids)[offset:offset + limit]
if not cids:
- print("[inbox.load] no chats were found")
+ print('[inbox.load] no chats were found')
cids = []
onliners = await redis.execute("SMEMBERS", "users-online")
if not onliners:
@@ -62,53 +64,62 @@ async def load_chats(_, info, limit: int = 50, offset: int = 0):
c = await redis.execute("GET", "chats/" + cid)
if c:
c = dict(json.loads(c))
- c["messages"] = await load_messages(cid, 5, 0)
- c["unread"] = await get_unread_counter(cid, auth.user_id)
+ c['messages'] = await load_messages(cid, 5, 0)
+ c['unread'] = await get_unread_counter(cid, auth.user_id)
with local_session() as session:
- c["members"] = []
+ c['members'] = []
for uid in c["users"]:
a = session.query(User).where(User.id == uid).first()
if a:
- c["members"].append(
- {
- "id": a.id,
- "slug": a.slug,
- "userpic": a.userpic,
- "name": a.name,
- "lastSeen": a.lastSeen,
- "online": a.id in onliners,
- }
- )
+ c['members'].append({
+ "id": a.id,
+ "slug": a.slug,
+ "userpic": a.userpic,
+ "name": a.name,
+ "lastSeen": a.lastSeen,
+ "online": a.id in onliners
+ })
chats.append(c)
- return {"chats": chats, "error": None}
+ return {
+ "chats": chats,
+ "error": None
+ }
@query.field("loadMessagesBy")
@login_required
async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
- """load :limit messages of :chat_id with :offset"""
+ ''' load :limit messages of :chat_id with :offset '''
auth: AuthCredentials = info.context["request"].auth
userchats = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
- userchats = [c.decode("utf-8") for c in userchats]
+ userchats = [c.decode('utf-8') for c in userchats]
# print('[inbox] userchats: %r' % userchats)
if userchats:
# print('[inbox] loading messages by...')
messages = []
- by_chat = by.get("chat")
+ by_chat = by.get('chat')
if by_chat in userchats:
chat = await redis.execute("GET", f"chats/{by_chat}")
# print(chat)
if not chat:
- return {"messages": [], "error": "chat not exist"}
+ return {
+ "messages": [],
+ "error": "chat not exist"
+ }
# everyone's messages in filtered chat
messages = await load_messages(by_chat, limit, offset)
return {
- "messages": sorted(list(messages), key=lambda m: m["createdAt"]),
- "error": None,
+ "messages": sorted(
+ list(messages),
+ key=lambda m: m['createdAt']
+ ),
+ "error": None
}
else:
- return {"error": "Cannot access messages of this chat"}
+ return {
+ "error": "Cannot access messages of this chat"
+ }
@query.field("loadRecipients")
@@ -127,14 +138,15 @@ async def load_recipients(_, info, limit=50, offset=0):
chat_users += session.query(User).where(User.emailConfirmed).limit(limit).offset(offset)
members = []
for a in chat_users:
- members.append(
- {
- "id": a.id,
- "slug": a.slug,
- "userpic": a.userpic,
- "name": a.name,
- "lastSeen": a.lastSeen,
- "online": a.id in onliners,
- }
- )
- return {"members": members, "error": None}
+ members.append({
+ "id": a.id,
+ "slug": a.slug,
+ "userpic": a.userpic,
+ "name": a.name,
+ "lastSeen": a.lastSeen,
+ "online": a.id in onliners
+ })
+ return {
+ "members": members,
+ "error": None
+ }
diff --git a/resolvers/inbox/messages.py b/resolvers/inbox/messages.py
index b3d2689f..56187edf 100644
--- a/resolvers/inbox/messages.py
+++ b/resolvers/inbox/messages.py
@@ -1,36 +1,41 @@
+import asyncio
+import json
+from typing import Any
+from datetime import datetime, timezone
+from graphql.type import GraphQLResolveInfo
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
-from datetime import datetime, timezone
-from services.following import FollowingManager, FollowingResult
-
-import json
+from services.following import FollowingManager, FollowingResult, Following
+from validations.inbox import Message
@mutation.field("createMessage")
@login_required
async def create_message(_, info, chat: str, body: str, replyTo=None):
- """create message with :body for :chat_id replying to :replyTo optionally"""
+ """ create message with :body for :chat_id replying to :replyTo optionally """
auth: AuthCredentials = info.context["request"].auth
chat = await redis.execute("GET", f"chats/{chat}")
if not chat:
- return {"error": "chat is not exist"}
+ return {
+ "error": "chat is not exist"
+ }
else:
chat = dict(json.loads(chat))
message_id = await redis.execute("GET", f"chats/{chat['id']}/next_message_id")
message_id = int(message_id)
new_message = {
- "chatId": chat["id"],
+ "chatId": chat['id'],
"id": message_id,
"author": auth.user_id,
"body": body,
- "createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
+ "createdAt": int(datetime.now(tz=timezone.utc).timestamp())
}
if replyTo:
- new_message["replyTo"] = replyTo
- chat["updatedAt"] = new_message["createdAt"]
+ new_message['replyTo'] = replyTo
+ chat['updatedAt'] = new_message['createdAt']
await redis.execute("SET", f"chats/{chat['id']}", json.dumps(chat))
print(f"[inbox] creating message {new_message}")
await redis.execute(
@@ -41,12 +46,17 @@ async def create_message(_, info, chat: str, body: str, replyTo=None):
users = chat["users"]
for user_slug in users:
- await redis.execute("LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id))
+ await redis.execute(
+ "LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id)
+ )
- result = FollowingResult("NEW", "chat", new_message)
- await FollowingManager.push("chat", result)
+ result = FollowingResult("NEW", 'chat', new_message)
+ await FollowingManager.push('chat', result)
- return {"message": new_message, "error": None}
+ return {
+ "message": new_message,
+ "error": None
+ }
@mutation.field("updateMessage")
@@ -71,10 +81,13 @@ async def update_message(_, info, chat_id: str, message_id: int, body: str):
await redis.execute("SET", f"chats/{chat_id}/messages/{message_id}", json.dumps(message))
- result = FollowingResult("UPDATED", "chat", message)
- await FollowingManager.push("chat", result)
+ result = FollowingResult("UPDATED", 'chat', message)
+ await FollowingManager.push('chat', result)
- return {"message": message, "error": None}
+ return {
+ "message": message,
+ "error": None
+ }
@mutation.field("deleteMessage")
@@ -101,7 +114,7 @@ async def delete_message(_, info, chat_id: str, message_id: int):
for user_id in users:
await redis.execute("LREM", f"chats/{chat_id}/unread/{user_id}", 0, str(message_id))
- result = FollowingResult("DELETED", "chat", message)
+ result = FollowingResult("DELETED", 'chat', message)
await FollowingManager.push(result)
return {}
@@ -124,4 +137,6 @@ async def mark_as_read(_, info, chat_id: str, messages: [int]):
for message_id in messages:
await redis.execute("LREM", f"chats/{chat_id}/unread/{auth.user_id}", 0, str(message_id))
- return {"error": None}
+ return {
+ "error": None
+ }
diff --git a/resolvers/inbox/search.py b/resolvers/inbox/search.py
index 510ce52c..1ca340e5 100644
--- a/resolvers/inbox/search.py
+++ b/resolvers/inbox/search.py
@@ -1,14 +1,13 @@
+import json
+from datetime import datetime, timezone, timedelta
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.orm import local_session
from base.redis import redis
from base.resolvers import query
-from datetime import datetime, timedelta, timezone
+from base.orm import local_session
from orm.user import AuthorFollower, User
from resolvers.inbox.load import load_messages
-import json
-
@query.field("searchRecipients")
@login_required
@@ -18,7 +17,7 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int =
auth: AuthCredentials = info.context["request"].auth
talk_before = await redis.execute("GET", f"/chats_by_user/{auth.user_id}")
if talk_before:
- talk_before = list(json.loads(talk_before))[offset : offset + limit]
+ talk_before = list(json.loads(talk_before))[offset:offset + limit]
for chat_id in talk_before:
members = await redis.execute("GET", f"/chats/{chat_id}/users")
if members:
@@ -32,24 +31,23 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int =
with local_session() as session:
# followings
- result += (
- session.query(AuthorFollower.author)
- .join(User, User.id == AuthorFollower.follower)
- .where(User.slug.startswith(query))
- .offset(offset + len(result))
- .limit(more_amount)
- )
+ result += session.query(AuthorFollower.author).join(
+ User, User.id == AuthorFollower.follower
+ ).where(
+ User.slug.startswith(query)
+ ).offset(offset + len(result)).limit(more_amount)
more_amount = limit
# followers
- result += (
- session.query(AuthorFollower.follower)
- .join(User, User.id == AuthorFollower.author)
- .where(User.slug.startswith(query))
- .offset(offset + len(result))
- .limit(offset + len(result) + limit)
- )
- return {"members": list(result), "error": None}
+ result += session.query(AuthorFollower.follower).join(
+ User, User.id == AuthorFollower.author
+ ).where(
+ User.slug.startswith(query)
+ ).offset(offset + len(result)).limit(offset + len(result) + limit)
+ return {
+ "members": list(result),
+ "error": None
+ }
@query.field("searchMessages")
@@ -59,22 +57,22 @@ async def search_user_chats(by, messages, user_id: int, limit, offset):
cids.union(set(await redis.execute("SMEMBERS", "chats_by_user/" + str(user_id))))
messages = []
- by_author = by.get("author")
+ by_author = by.get('author')
if by_author:
# all author's messages
cids.union(set(await redis.execute("SMEMBERS", f"chats_by_user/{by_author}")))
# author's messages in filtered chat
messages.union(set(filter(lambda m: m["author"] == by_author, list(messages))))
for c in cids:
- c = c.decode("utf-8")
+ c = c.decode('utf-8')
messages = await load_messages(c, limit, offset)
- body_like = by.get("body")
+ body_like = by.get('body')
if body_like:
# search in all messages in all user's chats
for c in cids:
# FIXME: use redis scan here
- c = c.decode("utf-8")
+ c = c.decode('utf-8')
mmm = await load_messages(c, limit, offset)
for m in mmm:
if body_like in m["body"]:
@@ -85,12 +83,13 @@ async def search_user_chats(by, messages, user_id: int, limit, offset):
days = by.get("days")
if days:
- messages.extend(
- filter(
- list(messages),
- key=lambda m: (
- datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by["days"])
- ),
+ messages.extend(filter(
+ list(messages),
+ key=lambda m: (
+ datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by["days"])
)
- )
- return {"messages": messages, "error": None}
+ ))
+ return {
+ "messages": messages,
+ "error": None
+ }
diff --git a/resolvers/notifications.py b/resolvers/notifications.py
index 3ece629e..0cfc2244 100644
--- a/resolvers/notifications.py
+++ b/resolvers/notifications.py
@@ -1,9 +1,10 @@
-from auth.authenticate import login_required
+from sqlalchemy import select, desc, and_, update
+
from auth.credentials import AuthCredentials
+from base.resolvers import query, mutation
+from auth.authenticate import login_required
from base.orm import local_session
-from base.resolvers import mutation, query
from orm import Notification
-from sqlalchemy import and_, desc, select, update
@query.field("loadNotifications")
@@ -15,26 +16,25 @@ async def load_notifications(_, info, params=None):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- limit = params.get("limit", 50)
- offset = params.get("offset", 0)
+ limit = params.get('limit', 50)
+ offset = params.get('offset', 0)
- q = (
- select(Notification)
- .where(Notification.user == user_id)
- .order_by(desc(Notification.createdAt))
- .limit(limit)
- .offset(offset)
- )
+ q = select(Notification).where(
+ Notification.user == user_id
+ ).order_by(desc(Notification.createdAt)).limit(limit).offset(offset)
notifications = []
with local_session() as session:
- total_count = session.query(Notification).where(Notification.user == user_id).count()
+ total_count = session.query(Notification).where(
+ Notification.user == user_id
+ ).count()
- total_unread_count = (
- session.query(Notification)
- .where(and_(Notification.user == user_id, Notification.seen == False)) # noqa: E712
- .count()
- )
+ total_unread_count = session.query(Notification).where(
+ and_(
+ Notification.user == user_id,
+ Notification.seen == False
+ )
+ ).count()
for [notification] in session.execute(q):
notification.type = notification.type.name
@@ -43,7 +43,7 @@ async def load_notifications(_, info, params=None):
return {
"notifications": notifications,
"totalCount": total_count,
- "totalUnreadCount": total_unread_count,
+ "totalUnreadCount": total_unread_count
}
@@ -54,11 +54,9 @@ async def mark_notification_as_read(_, info, notification_id: int):
user_id = auth.user_id
with local_session() as session:
- notification = (
- session.query(Notification)
- .where(and_(Notification.id == notification_id, Notification.user == user_id))
- .one()
- )
+ notification = session.query(Notification).where(
+ and_(Notification.id == notification_id, Notification.user == user_id)
+ ).one()
notification.seen = True
session.commit()
@@ -71,11 +69,12 @@ async def mark_all_notifications_as_read(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- statement = (
- update(Notification)
- .where(and_(Notification.user == user_id, Notification.seen == False)) # noqa: E712
- .values(seen=True)
- )
+ statement = update(Notification).where(
+ and_(
+ Notification.user == user_id,
+ Notification.seen == False
+ )
+ ).values(seen=True)
with local_session() as session:
try:
diff --git a/resolvers/upload.py b/resolvers/upload.py
index 9649222c..44c7b81c 100644
--- a/resolvers/upload.py
+++ b/resolvers/upload.py
@@ -1,37 +1,34 @@
-from botocore.exceptions import BotoCoreError, ClientError
-from starlette.responses import JSONResponse
-
-import boto3
import os
import shutil
import tempfile
import uuid
+import boto3
+from botocore.exceptions import BotoCoreError, ClientError
+from starlette.responses import JSONResponse
-STORJ_ACCESS_KEY = os.environ.get("STORJ_ACCESS_KEY")
-STORJ_SECRET_KEY = os.environ.get("STORJ_SECRET_KEY")
-STORJ_END_POINT = os.environ.get("STORJ_END_POINT")
-STORJ_BUCKET_NAME = os.environ.get("STORJ_BUCKET_NAME")
-CDN_DOMAIN = os.environ.get("CDN_DOMAIN")
+STORJ_ACCESS_KEY = os.environ.get('STORJ_ACCESS_KEY')
+STORJ_SECRET_KEY = os.environ.get('STORJ_SECRET_KEY')
+STORJ_END_POINT = os.environ.get('STORJ_END_POINT')
+STORJ_BUCKET_NAME = os.environ.get('STORJ_BUCKET_NAME')
+CDN_DOMAIN = os.environ.get('CDN_DOMAIN')
async def upload_handler(request):
form = await request.form()
- file = form.get("file")
+ file = form.get('file')
if file is None:
- return JSONResponse({"error": "No file uploaded"}, status_code=400)
+ return JSONResponse({'error': 'No file uploaded'}, status_code=400)
file_name, file_extension = os.path.splitext(file.filename)
- key = "files/" + str(uuid.uuid4()) + file_extension
+ key = 'files/' + str(uuid.uuid4()) + file_extension
# Create an S3 client with Storj configuration
- s3 = boto3.client(
- "s3",
- aws_access_key_id=STORJ_ACCESS_KEY,
- aws_secret_access_key=STORJ_SECRET_KEY,
- endpoint_url=STORJ_END_POINT,
- )
+ s3 = boto3.client('s3',
+ aws_access_key_id=STORJ_ACCESS_KEY,
+ aws_secret_access_key=STORJ_SECRET_KEY,
+ endpoint_url=STORJ_END_POINT)
try:
# Save the uploaded file to a temporary file
@@ -42,13 +39,18 @@ async def upload_handler(request):
Filename=tmp_file.name,
Bucket=STORJ_BUCKET_NAME,
Key=key,
- ExtraArgs={"ContentType": file.content_type},
+ ExtraArgs={
+ "ContentType": file.content_type
+ }
)
- url = "https://" + CDN_DOMAIN + "/" + key
+ url = 'https://' + CDN_DOMAIN + '/' + key
- return JSONResponse({"url": url, "originalFilename": file.filename})
+ return JSONResponse({'url': url, 'originalFilename': file.filename})
except (BotoCoreError, ClientError) as e:
print(e)
- return JSONResponse({"error": "Failed to upload file"}, status_code=500)
+ return JSONResponse({'error': 'Failed to upload file'}, status_code=500)
+
+
+
diff --git a/resolvers/zine/following.py b/resolvers/zine/following.py
index bc92371a..99481571 100644
--- a/resolvers/zine/following.py
+++ b/resolvers/zine/following.py
@@ -1,12 +1,17 @@
+import asyncio
+from base.orm import local_session
+from base.resolvers import mutation
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.resolvers import mutation
-
# from resolvers.community import community_follow, community_unfollow
+from orm.user import AuthorFollower
+from orm.topic import TopicFollower
+from orm.shout import ShoutReactionsFollower
from resolvers.zine.profile import author_follow, author_unfollow
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
from resolvers.zine.topics import topic_follow, topic_unfollow
-from services.following import FollowingManager, FollowingResult
+from services.following import Following, FollowingManager, FollowingResult
+from graphql.type import GraphQLResolveInfo
@mutation.field("follow")
@@ -17,20 +22,20 @@ async def follow(_, info, what, slug):
try:
if what == "AUTHOR":
if author_follow(auth.user_id, slug):
- result = FollowingResult("NEW", "author", slug)
- await FollowingManager.push("author", result)
+ result = FollowingResult("NEW", 'author', slug)
+ await FollowingManager.push('author', result)
elif what == "TOPIC":
if topic_follow(auth.user_id, slug):
- result = FollowingResult("NEW", "topic", slug)
- await FollowingManager.push("topic", result)
+ result = FollowingResult("NEW", 'topic', slug)
+ await FollowingManager.push('topic', result)
elif what == "COMMUNITY":
if False: # TODO: use community_follow(auth.user_id, slug):
- result = FollowingResult("NEW", "community", slug)
- await FollowingManager.push("community", result)
+ result = FollowingResult("NEW", 'community', slug)
+ await FollowingManager.push('community', result)
elif what == "REACTIONS":
if reactions_follow(auth.user_id, slug):
- result = FollowingResult("NEW", "shout", slug)
- await FollowingManager.push("shout", result)
+ result = FollowingResult("NEW", 'shout', slug)
+ await FollowingManager.push('shout', result)
except Exception as e:
print(Exception(e))
return {"error": str(e)}
@@ -46,20 +51,20 @@ async def unfollow(_, info, what, slug):
try:
if what == "AUTHOR":
if author_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", "author", slug)
- await FollowingManager.push("author", result)
+ result = FollowingResult("DELETED", 'author', slug)
+ await FollowingManager.push('author', result)
elif what == "TOPIC":
if topic_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", "topic", slug)
- await FollowingManager.push("topic", result)
+ result = FollowingResult("DELETED", 'topic', slug)
+ await FollowingManager.push('topic', result)
elif what == "COMMUNITY":
if False: # TODO: use community_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", "community", slug)
- await FollowingManager.push("community", result)
+ result = FollowingResult("DELETED", 'community', slug)
+ await FollowingManager.push('community', result)
elif what == "REACTIONS":
if reactions_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", "shout", slug)
- await FollowingManager.push("shout", result)
+ result = FollowingResult("DELETED", 'shout', slug)
+ await FollowingManager.push('shout', result)
except Exception as e:
return {"error": str(e)}
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 90d790ac..4619efa6 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -1,47 +1,49 @@
+from datetime import datetime, timedelta, timezone
+
+from sqlalchemy.orm import joinedload, aliased
+from sqlalchemy.sql.expression import desc, asc, select, func, case, and_, text, nulls_last
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.exceptions import ObjectNotExist
+from base.exceptions import ObjectNotExist, OperationNotAllowed
from base.orm import local_session
from base.resolvers import query
-from datetime import datetime, timedelta, timezone
from orm import TopicFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.user import AuthorFollower
-from sqlalchemy.orm import aliased, joinedload
-from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select
def add_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction).add_columns(
- func.sum(aliased_reaction.id).label("reacted_stat"),
- func.sum(case((aliased_reaction.kind == ReactionKind.COMMENT, 1), else_=0)).label(
- "commented_stat"
- ),
+ func.sum(
+ aliased_reaction.id
+ ).label('reacted_stat'),
func.sum(
case(
- # do not count comments' reactions
- (aliased_reaction.replyTo.is_not(None), 0),
- (aliased_reaction.kind == ReactionKind.AGREE, 1),
- (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
- (aliased_reaction.kind == ReactionKind.PROOF, 1),
- (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
- (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
- (aliased_reaction.kind == ReactionKind.REJECT, -1),
- (aliased_reaction.kind == ReactionKind.LIKE, 1),
- (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
- else_=0,
+ (aliased_reaction.kind == ReactionKind.COMMENT, 1),
+ else_=0
)
- ).label("rating_stat"),
- func.max(
- case(
- (aliased_reaction.kind != ReactionKind.COMMENT, None),
- else_=aliased_reaction.createdAt,
- )
- ).label("last_comment"),
- )
+ ).label('commented_stat'),
+ func.sum(case(
+ # do not count comments' reactions
+ (aliased_reaction.replyTo.is_not(None), 0),
+ (aliased_reaction.kind == ReactionKind.AGREE, 1),
+ (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
+ (aliased_reaction.kind == ReactionKind.PROOF, 1),
+ (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
+ (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
+ (aliased_reaction.kind == ReactionKind.REJECT, -1),
+ (aliased_reaction.kind == ReactionKind.LIKE, 1),
+ (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
+ else_=0)
+ ).label('rating_stat'),
+ func.max(case(
+ (aliased_reaction.kind != ReactionKind.COMMENT, None),
+ else_=aliased_reaction.createdAt
+ )).label('last_comment'))
return q
@@ -58,7 +60,7 @@ def apply_filters(q, filters, user_id=None):
if filters.get("layout"):
q = q.filter(Shout.layout == filters.get("layout"))
- if filters.get("excludeLayout"):
+ if filters.get('excludeLayout'):
q = q.filter(Shout.layout != filters.get("excludeLayout"))
if filters.get("author"):
q = q.filter(Shout.authors.any(slug=filters.get("author")))
@@ -85,27 +87,27 @@ async def load_shout(_, info, slug=None, shout_id=None):
q = add_stat_columns(q)
if slug is not None:
- q = q.filter(Shout.slug == slug)
+ q = q.filter(
+ Shout.slug == slug
+ )
if shout_id is not None:
- q = q.filter(Shout.id == shout_id)
+ q = q.filter(
+ Shout.id == shout_id
+ )
- q = q.filter(Shout.deletedAt.is_(None)).group_by(Shout.id)
+ q = q.filter(
+ Shout.deletedAt.is_(None)
+ ).group_by(Shout.id)
try:
- [
- shout,
- reacted_stat,
- commented_stat,
- rating_stat,
- last_comment,
- ] = session.execute(q).first()
+ [shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute(q).first()
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat,
+ "rating": rating_stat
}
for author_caption in session.query(ShoutAuthor).join(Shout).where(Shout.slug == slug):
@@ -140,13 +142,14 @@ async def load_shouts_by(_, info, options):
:return: Shout[]
"""
- q = (
- select(Shout)
- .options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
+ q = select(Shout).options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ ).where(
+ and_(
+ Shout.deletedAt.is_(None),
+ Shout.layout.is_not(None)
)
- .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None)))
)
q = add_stat_columns(q)
@@ -156,7 +159,7 @@ async def load_shouts_by(_, info, options):
order_by = options.get("order_by", Shout.publishedAt)
- query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
+ query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
@@ -166,19 +169,13 @@ async def load_shouts_by(_, info, options):
with local_session() as session:
shouts_map = {}
- for [
- shout,
- reacted_stat,
- commented_stat,
- rating_stat,
- last_comment,
- ] in session.execute(q).unique():
+ for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat,
+ "rating": rating_stat
}
shouts_map[shout.id] = shout
@@ -191,13 +188,11 @@ async def get_drafts(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- q = (
- select(Shout)
- .options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- )
- .where(and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id))
+ q = select(Shout).options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ ).where(
+ and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id)
)
q = q.group_by(Shout.id)
@@ -216,26 +211,24 @@ async def get_my_feed(_, info, options):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- subquery = (
- select(Shout.id)
- .join(ShoutAuthor)
- .join(AuthorFollower, AuthorFollower.follower == user_id)
- .join(ShoutTopic)
- .join(TopicFollower, TopicFollower.follower == user_id)
+ subquery = select(Shout.id).join(
+ ShoutAuthor
+ ).join(
+ AuthorFollower, AuthorFollower.follower == user_id
+ ).join(
+ ShoutTopic
+ ).join(
+ TopicFollower, TopicFollower.follower == user_id
)
- q = (
- select(Shout)
- .options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- )
- .where(
- and_(
- Shout.publishedAt.is_not(None),
- Shout.deletedAt.is_(None),
- Shout.id.in_(subquery),
- )
+ q = select(Shout).options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ ).where(
+ and_(
+ Shout.publishedAt.is_not(None),
+ Shout.deletedAt.is_(None),
+ Shout.id.in_(subquery)
)
)
@@ -244,7 +237,7 @@ async def get_my_feed(_, info, options):
order_by = options.get("order_by", Shout.publishedAt)
- query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
+ query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
@@ -253,19 +246,13 @@ async def get_my_feed(_, info, options):
shouts = []
with local_session() as session:
shouts_map = {}
- for [
- shout,
- reacted_stat,
- commented_stat,
- rating_stat,
- last_comment,
- ] in session.execute(q).unique():
+ for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat,
+ "rating": rating_stat
}
shouts_map[shout.id] = shout
diff --git a/resolvers/zine/profile.py b/resolvers/zine/profile.py
index 7275226d..552af43f 100644
--- a/resolvers/zine/profile.py
+++ b/resolvers/zine/profile.py
@@ -1,16 +1,17 @@
+from typing import List
+from datetime import datetime, timedelta, timezone
+from sqlalchemy import and_, func, distinct, select, literal
+from sqlalchemy.orm import aliased, joinedload
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation, query
-from datetime import datetime, timedelta, timezone
from orm.reaction import Reaction, ReactionKind
from orm.shout import ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from orm.user import AuthorFollower, Role, User, UserRating, UserRole
from resolvers.zine.topics import followed_by_user
-from sqlalchemy import and_, distinct, func, literal, select
-from sqlalchemy.orm import aliased, joinedload
-from typing import List
def add_author_stat_columns(q):
@@ -20,24 +21,24 @@ def add_author_stat_columns(q):
# user_rating_aliased = aliased(UserRating)
q = q.outerjoin(shout_author_aliased).add_columns(
- func.count(distinct(shout_author_aliased.shout)).label("shouts_stat")
+ func.count(distinct(shout_author_aliased.shout)).label('shouts_stat')
)
q = q.outerjoin(author_followers, author_followers.author == User.id).add_columns(
- func.count(distinct(author_followers.follower)).label("followers_stat")
+ func.count(distinct(author_followers.follower)).label('followers_stat')
)
q = q.outerjoin(author_following, author_following.follower == User.id).add_columns(
- func.count(distinct(author_following.author)).label("followings_stat")
+ func.count(distinct(author_following.author)).label('followings_stat')
)
- q = q.add_columns(literal(0).label("rating_stat"))
+ q = q.add_columns(literal(0).label('rating_stat'))
# FIXME
# q = q.outerjoin(user_rating_aliased, user_rating_aliased.user == User.id).add_columns(
# # TODO: check
# func.sum(user_rating_aliased.value).label('rating_stat')
# )
- q = q.add_columns(literal(0).label("commented_stat"))
+ q = q.add_columns(literal(0).label('commented_stat'))
# q = q.outerjoin(Reaction, and_(Reaction.createdBy == User.id, Reaction.body.is_not(None))).add_columns(
# func.count(distinct(Reaction.id)).label('commented_stat')
# )
@@ -48,19 +49,13 @@ def add_author_stat_columns(q):
def add_stat(author, stat_columns):
- [
- shouts_stat,
- followers_stat,
- followings_stat,
- rating_stat,
- commented_stat,
- ] = stat_columns
+ [shouts_stat, followers_stat, followings_stat, rating_stat, commented_stat] = stat_columns
author.stat = {
"shouts": shouts_stat,
"followers": followers_stat,
"followings": followings_stat,
"rating": rating_stat,
- "commented": commented_stat,
+ "commented": commented_stat
}
return author
@@ -124,10 +119,10 @@ async def user_followers(_, _info, slug) -> List[User]:
q = add_author_stat_columns(q)
aliased_user = aliased(User)
- q = (
- q.join(AuthorFollower, AuthorFollower.follower == User.id)
- .join(aliased_user, aliased_user.id == AuthorFollower.author)
- .where(aliased_user.slug == slug)
+ q = q.join(AuthorFollower, AuthorFollower.follower == User.id).join(
+ aliased_user, aliased_user.id == AuthorFollower.author
+ ).where(
+ aliased_user.slug == slug
)
return get_authors_from_query(q)
@@ -155,10 +150,15 @@ async def update_profile(_, info, profile):
with local_session() as session:
user = session.query(User).filter(User.id == user_id).one()
if not user:
- return {"error": "canoot find user"}
+ return {
+ "error": "canoot find user"
+ }
user.update(profile)
session.commit()
- return {"error": None, "author": user}
+ return {
+ "error": None,
+ "author": user
+ }
@mutation.field("rateUser")
@@ -200,10 +200,13 @@ def author_follow(user_id, slug):
def author_unfollow(user_id, slug):
with local_session() as session:
flw = (
- session.query(AuthorFollower)
- .join(User, User.id == AuthorFollower.author)
- .filter(and_(AuthorFollower.follower == user_id, User.slug == slug))
- .first()
+ session.query(
+ AuthorFollower
+ ).join(User, User.id == AuthorFollower.author).filter(
+ and_(
+ AuthorFollower.follower == user_id, User.slug == slug
+ )
+ ).first()
)
if flw:
session.delete(flw)
@@ -229,16 +232,12 @@ async def get_author(_, _info, slug):
[author] = get_authors_from_query(q)
with local_session() as session:
- comments_count = (
- session.query(Reaction)
- .where(
- and_(
- Reaction.createdBy == author.id,
- Reaction.kind == ReactionKind.COMMENT,
- )
+ comments_count = session.query(Reaction).where(
+ and_(
+ Reaction.createdBy == author.id,
+ Reaction.kind == ReactionKind.COMMENT
)
- .count()
- )
+ ).count()
author.stat["commented"] = comments_count
return author
@@ -261,7 +260,9 @@ async def load_authors_by(_, info, by, limit, offset):
days_before = datetime.now(tz=timezone.utc) - timedelta(days=by["createdAt"])
q = q.filter(User.createdAt > days_before)
- q = q.order_by(by.get("order", User.createdAt)).limit(limit).offset(offset)
+ q = q.order_by(
+ by.get("order", User.createdAt)
+ ).limit(limit).offset(offset)
return get_authors_from_query(q)
@@ -272,13 +273,13 @@ async def load_my_subscriptions(_, info):
auth = info.context["request"].auth
user_id = auth.user_id
- authors_query = (
- select(User)
- .join(AuthorFollower, AuthorFollower.author == User.id)
- .where(AuthorFollower.follower == user_id)
+ authors_query = select(User).join(AuthorFollower, AuthorFollower.author == User.id).where(
+ AuthorFollower.follower == user_id
)
- topics_query = select(Topic).join(TopicFollower).where(TopicFollower.follower == user_id)
+ topics_query = select(Topic).join(TopicFollower).where(
+ TopicFollower.follower == user_id
+ )
topics = []
authors = []
@@ -290,4 +291,7 @@ async def load_my_subscriptions(_, info):
for [topic] in session.execute(topics_query):
topics.append(topic)
- return {"topics": topics, "authors": authors}
+ return {
+ "topics": topics,
+ "authors": authors
+ }
diff --git a/resolvers/zine/reactions.py b/resolvers/zine/reactions.py
index 680cac52..1c132b69 100644
--- a/resolvers/zine/reactions.py
+++ b/resolvers/zine/reactions.py
@@ -1,37 +1,42 @@
+from datetime import datetime, timedelta, timezone
+from sqlalchemy import and_, asc, desc, select, text, func, case
+from sqlalchemy.orm import aliased
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.exceptions import OperationNotAllowed
from base.orm import local_session
from base.resolvers import mutation, query
-from datetime import datetime, timedelta, timezone
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower
from orm.user import User
from services.notifications.notification_service import notification_service
-from sqlalchemy import and_, asc, case, desc, func, select, text
-from sqlalchemy.orm import aliased
def add_reaction_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction, Reaction.id == aliased_reaction.replyTo).add_columns(
- func.sum(aliased_reaction.id).label("reacted_stat"),
- func.sum(case((aliased_reaction.body.is_not(None), 1), else_=0)).label("commented_stat"),
+ func.sum(
+ aliased_reaction.id
+ ).label('reacted_stat'),
func.sum(
case(
- (aliased_reaction.kind == ReactionKind.AGREE, 1),
- (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
- (aliased_reaction.kind == ReactionKind.PROOF, 1),
- (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
- (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
- (aliased_reaction.kind == ReactionKind.REJECT, -1),
- (aliased_reaction.kind == ReactionKind.LIKE, 1),
- (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
- else_=0,
+ (aliased_reaction.body.is_not(None), 1),
+ else_=0
)
- ).label("rating_stat"),
- )
+ ).label('commented_stat'),
+ func.sum(case(
+ (aliased_reaction.kind == ReactionKind.AGREE, 1),
+ (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
+ (aliased_reaction.kind == ReactionKind.PROOF, 1),
+ (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
+ (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
+ (aliased_reaction.kind == ReactionKind.REJECT, -1),
+ (aliased_reaction.kind == ReactionKind.LIKE, 1),
+ (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
+ else_=0)
+ ).label('rating_stat'))
return q
@@ -42,19 +47,17 @@ def reactions_follow(user_id, shout_id: int, auto=False):
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
- session.query(ShoutReactionsFollower)
- .where(
- and_(
- ShoutReactionsFollower.follower == user_id,
- ShoutReactionsFollower.shout == shout.id,
- )
- )
- .first()
+ session.query(ShoutReactionsFollower).where(and_(
+ ShoutReactionsFollower.follower == user_id,
+ ShoutReactionsFollower.shout == shout.id,
+ )).first()
)
if not following:
following = ShoutReactionsFollower.create(
- follower=user_id, shout=shout.id, auto=auto
+ follower=user_id,
+ shout=shout.id,
+ auto=auto
)
session.add(following)
session.commit()
@@ -69,14 +72,10 @@ def reactions_unfollow(user_id: int, shout_id: int):
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
- session.query(ShoutReactionsFollower)
- .where(
- and_(
- ShoutReactionsFollower.follower == user_id,
- ShoutReactionsFollower.shout == shout.id,
- )
- )
- .first()
+ session.query(ShoutReactionsFollower).where(and_(
+ ShoutReactionsFollower.follower == user_id,
+ ShoutReactionsFollower.shout == shout.id
+ )).first()
)
if following:
@@ -89,31 +88,30 @@ def reactions_unfollow(user_id: int, shout_id: int):
def is_published_author(session, user_id):
- """checks if user has at least one publication"""
- return (
- session.query(Shout)
- .where(Shout.authors.contains(user_id))
- .filter(and_(Shout.publishedAt.is_not(None), Shout.deletedAt.is_(None)))
- .count()
- > 0
- )
+ ''' checks if user has at least one publication '''
+ return session.query(
+ Shout
+ ).where(
+ Shout.authors.contains(user_id)
+ ).filter(
+ and_(
+ Shout.publishedAt.is_not(None),
+ Shout.deletedAt.is_(None)
+ )
+ ).count() > 0
def check_to_publish(session, user_id, reaction):
- """set shout to public if publicated approvers amount > 4"""
+ ''' set shout to public if publicated approvers amount > 4 '''
if not reaction.replyTo and reaction.kind in [
ReactionKind.ACCEPT,
ReactionKind.LIKE,
- ReactionKind.PROOF,
+ ReactionKind.PROOF
]:
if is_published_author(user_id):
# now count how many approvers are voted already
- approvers_reactions = (
- session.query(Reaction).where(Reaction.shout == reaction.shout).all()
- )
- approvers = [
- user_id,
- ]
+ approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
+ approvers = [user_id, ]
for ar in approvers_reactions:
a = ar.createdBy
if is_published_author(session, a):
@@ -124,11 +122,11 @@ def check_to_publish(session, user_id, reaction):
def check_to_hide(session, user_id, reaction):
- """hides any shout if 20% of reactions are negative"""
+ ''' hides any shout if 20% of reactions are negative '''
if not reaction.replyTo and reaction.kind in [
ReactionKind.REJECT,
ReactionKind.DISLIKE,
- ReactionKind.DISPROOF,
+ ReactionKind.DISPROOF
]:
# if is_published_author(user):
approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
@@ -137,7 +135,7 @@ def check_to_hide(session, user_id, reaction):
if r.kind in [
ReactionKind.REJECT,
ReactionKind.DISLIKE,
- ReactionKind.DISPROOF,
+ ReactionKind.DISPROOF
]:
rejects += 1
if len(approvers_reactions) / rejects < 5:
@@ -148,14 +146,14 @@ def check_to_hide(session, user_id, reaction):
def set_published(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
s.publishedAt = datetime.now(tz=timezone.utc)
- s.visibility = text("public")
+ s.visibility = text('public')
session.add(s)
session.commit()
def set_hidden(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
- s.visibility = text("community")
+ s.visibility = text('community')
session.add(s)
session.commit()
@@ -164,46 +162,37 @@ def set_hidden(session, shout_id):
@login_required
async def create_reaction(_, info, reaction):
auth: AuthCredentials = info.context["request"].auth
- reaction["createdBy"] = auth.user_id
+ reaction['createdBy'] = auth.user_id
rdict = {}
with local_session() as session:
shout = session.query(Shout).where(Shout.id == reaction["shout"]).one()
author = session.query(User).where(User.id == auth.user_id).one()
- if reaction["kind"] in [ReactionKind.DISLIKE.name, ReactionKind.LIKE.name]:
- existing_reaction = (
- session.query(Reaction)
- .where(
- and_(
- Reaction.shout == reaction["shout"],
- Reaction.createdBy == auth.user_id,
- Reaction.kind == reaction["kind"],
- Reaction.replyTo == reaction.get("replyTo"),
- )
+ if reaction["kind"] in [
+ ReactionKind.DISLIKE.name,
+ ReactionKind.LIKE.name
+ ]:
+ existing_reaction = session.query(Reaction).where(
+ and_(
+ Reaction.shout == reaction["shout"],
+ Reaction.createdBy == auth.user_id,
+ Reaction.kind == reaction["kind"],
+ Reaction.replyTo == reaction.get("replyTo")
)
- .first()
- )
+ ).first()
if existing_reaction is not None:
raise OperationNotAllowed("You can't vote twice")
- opposite_reaction_kind = (
- ReactionKind.DISLIKE
- if reaction["kind"] == ReactionKind.LIKE.name
- else ReactionKind.LIKE
- )
- opposite_reaction = (
- session.query(Reaction)
- .where(
+ opposite_reaction_kind = ReactionKind.DISLIKE if reaction["kind"] == ReactionKind.LIKE.name else ReactionKind.LIKE
+ opposite_reaction = session.query(Reaction).where(
and_(
Reaction.shout == reaction["shout"],
Reaction.createdBy == auth.user_id,
Reaction.kind == opposite_reaction_kind,
- Reaction.replyTo == reaction.get("replyTo"),
+ Reaction.replyTo == reaction.get("replyTo")
)
- )
- .first()
- )
+ ).first()
if opposite_reaction is not None:
session.delete(opposite_reaction)
@@ -232,8 +221,8 @@ async def create_reaction(_, info, reaction):
await notification_service.handle_new_reaction(r.id)
rdict = r.dict()
- rdict["shout"] = shout.dict()
- rdict["createdBy"] = author.dict()
+ rdict['shout'] = shout.dict()
+ rdict['createdBy'] = author.dict()
# self-regulation mechanics
if check_to_hide(session, auth.user_id, r):
@@ -246,7 +235,11 @@ async def create_reaction(_, info, reaction):
except Exception as e:
print(f"[resolvers.reactions] error on reactions autofollowing: {e}")
- rdict["stat"] = {"commented": 0, "reacted": 0, "rating": 0}
+ rdict['stat'] = {
+ "commented": 0,
+ "reacted": 0,
+ "rating": 0
+ }
return {"reaction": rdict}
@@ -279,7 +272,7 @@ async def update_reaction(_, info, id, reaction={}):
r.stat = {
"commented": commented_stat,
"reacted": reacted_stat,
- "rating": rating_stat,
+ "rating": rating_stat
}
return {"reaction": r}
@@ -297,12 +290,17 @@ async def delete_reaction(_, info, id):
if r.createdBy != auth.user_id:
return {"error": "access denied"}
- if r.kind in [ReactionKind.LIKE, ReactionKind.DISLIKE]:
+ if r.kind in [
+ ReactionKind.LIKE,
+ ReactionKind.DISLIKE
+ ]:
session.delete(r)
else:
r.deletedAt = datetime.now(tz=timezone.utc)
session.commit()
- return {"reaction": r}
+ return {
+ "reaction": r
+ }
@query.field("loadReactionsBy")
@@ -323,10 +321,12 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
:return: Reaction[]
"""
- q = (
- select(Reaction, User, Shout)
- .join(User, Reaction.createdBy == User.id)
- .join(Shout, Reaction.shout == Shout.id)
+ q = select(
+ Reaction, User, Shout
+ ).join(
+ User, Reaction.createdBy == User.id
+ ).join(
+ Shout, Reaction.shout == Shout.id
)
if by.get("shout"):
@@ -344,7 +344,7 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
if by.get("comment"):
q = q.filter(func.length(Reaction.body) > 0)
- if len(by.get("search", "")) > 2:
+ if len(by.get('search', '')) > 2:
q = q.filter(Reaction.body.ilike(f'%{by["body"]}%'))
if by.get("days"):
@@ -352,9 +352,13 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
q = q.filter(Reaction.createdAt > after)
order_way = asc if by.get("sort", "").startswith("-") else desc
- order_field = by.get("sort", "").replace("-", "") or Reaction.createdAt
+ order_field = by.get("sort", "").replace('-', '') or Reaction.createdAt
- q = q.group_by(Reaction.id, User.id, Shout.id).order_by(order_way(order_field))
+ q = q.group_by(
+ Reaction.id, User.id, Shout.id
+ ).order_by(
+ order_way(order_field)
+ )
q = add_reaction_stat_columns(q)
@@ -363,20 +367,13 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
reactions = []
with local_session() as session:
- for [
- reaction,
- user,
- shout,
- reacted_stat,
- commented_stat,
- rating_stat,
- ] in session.execute(q):
+ for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute(q):
reaction.createdBy = user
reaction.shout = shout
reaction.stat = {
"rating": rating_stat,
"commented": commented_stat,
- "reacted": reacted_stat,
+ "reacted": reacted_stat
}
reaction.kind = reaction.kind.name
diff --git a/resolvers/zine/topics.py b/resolvers/zine/topics.py
index f24065cd..f354a7b4 100644
--- a/resolvers/zine/topics.py
+++ b/resolvers/zine/topics.py
@@ -1,24 +1,24 @@
+from sqlalchemy import and_, select, distinct, func
+from sqlalchemy.orm import aliased
+
from auth.authenticate import login_required
from base.orm import local_session
from base.resolvers import mutation, query
-from orm import User
-from orm.shout import ShoutAuthor, ShoutTopic
+from orm.shout import ShoutTopic, ShoutAuthor
from orm.topic import Topic, TopicFollower
-from sqlalchemy import and_, distinct, func, select
-from sqlalchemy.orm import aliased
+from orm import User
def add_topic_stat_columns(q):
aliased_shout_author = aliased(ShoutAuthor)
aliased_topic_follower = aliased(TopicFollower)
- q = (
- q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic)
- .add_columns(func.count(distinct(ShoutTopic.shout)).label("shouts_stat"))
- .outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout)
- .add_columns(func.count(distinct(aliased_shout_author.user)).label("authors_stat"))
- .outerjoin(aliased_topic_follower)
- .add_columns(func.count(distinct(aliased_topic_follower.follower)).label("followers_stat"))
+ q = q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic).add_columns(
+ func.count(distinct(ShoutTopic.shout)).label('shouts_stat')
+ ).outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout).add_columns(
+ func.count(distinct(aliased_shout_author.user)).label('authors_stat')
+ ).outerjoin(aliased_topic_follower).add_columns(
+ func.count(distinct(aliased_topic_follower.follower)).label('followers_stat')
)
q = q.group_by(Topic.id)
@@ -31,7 +31,7 @@ def add_stat(topic, stat_columns):
topic.stat = {
"shouts": shouts_stat,
"authors": authors_stat,
- "followers": followers_stat,
+ "followers": followers_stat
}
return topic
@@ -133,10 +133,12 @@ def topic_unfollow(user_id, slug):
try:
with local_session() as session:
sub = (
- session.query(TopicFollower)
- .join(Topic)
- .filter(and_(TopicFollower.follower == user_id, Topic.slug == slug))
- .first()
+ session.query(TopicFollower).join(Topic).filter(
+ and_(
+ TopicFollower.follower == user_id,
+ Topic.slug == slug
+ )
+ ).first()
)
if sub:
session.delete(sub)
diff --git a/server.py b/server.py
index a491c30d..753c60ae 100644
--- a/server.py
+++ b/server.py
@@ -1,45 +1,56 @@
-from settings import DEV_SERVER_PID_FILE_NAME, PORT
-
-import os
import sys
+import os
import uvicorn
+from settings import PORT, DEV_SERVER_PID_FILE_NAME
+
def exception_handler(exception_type, exception, traceback, debug_hook=sys.excepthook):
print("%s: %s" % (exception_type.__name__, exception))
log_settings = {
- "version": 1,
- "disable_existing_loggers": True,
- "formatters": {
- "default": {
- "()": "uvicorn.logging.DefaultFormatter",
- "fmt": "%(levelprefix)s %(message)s",
- "use_colors": None,
- },
- "access": {
- "()": "uvicorn.logging.AccessFormatter",
- "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
+ 'version': 1,
+ 'disable_existing_loggers': True,
+ 'formatters': {
+ 'default': {
+ '()': 'uvicorn.logging.DefaultFormatter',
+ 'fmt': '%(levelprefix)s %(message)s',
+ 'use_colors': None
},
+ 'access': {
+ '()': 'uvicorn.logging.AccessFormatter',
+ 'fmt': '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
+ }
},
- "handlers": {
- "default": {
- "formatter": "default",
- "class": "logging.StreamHandler",
- "stream": "ext://sys.stderr",
+ 'handlers': {
+ 'default': {
+ 'formatter': 'default',
+ 'class': 'logging.StreamHandler',
+ 'stream': 'ext://sys.stderr'
},
- "access": {
- "formatter": "access",
- "class": "logging.StreamHandler",
- "stream": "ext://sys.stdout",
+ 'access': {
+ 'formatter': 'access',
+ 'class': 'logging.StreamHandler',
+ 'stream': 'ext://sys.stdout'
+ }
+ },
+ 'loggers': {
+ 'uvicorn': {
+ 'handlers': ['default'],
+ 'level': 'INFO'
},
- },
- "loggers": {
- "uvicorn": {"handlers": ["default"], "level": "INFO"},
- "uvicorn.error": {"level": "INFO", "handlers": ["default"], "propagate": True},
- "uvicorn.access": {"handlers": ["access"], "level": "INFO", "propagate": False},
- },
+ 'uvicorn.error': {
+ 'level': 'INFO',
+ 'handlers': ['default'],
+ 'propagate': True
+ },
+ 'uvicorn.access': {
+ 'handlers': ['access'],
+ 'level': 'INFO',
+ 'propagate': False
+ }
+ }
}
local_headers = [
@@ -47,8 +58,7 @@ local_headers = [
("Access-Control-Allow-Origin", "https://localhost:3000"),
(
"Access-Control-Allow-Headers",
- "DNT,User-Agent,X-Requested-With,If-Modified-Since,"
- + "Cache-Control,Content-Type,Range,Authorization",
+ "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization",
),
("Access-Control-Expose-Headers", "Content-Length,Content-Range"),
("Access-Control-Allow-Credentials", "true"),
@@ -76,17 +86,15 @@ if __name__ == "__main__":
# log_config=log_settings,
log_level=None,
access_log=True,
- reload=want_reload,
+ reload=want_reload
) # , ssl_keyfile="discours.key", ssl_certfile="discours.crt")
elif x == "migrate":
from migration import process
-
print("MODE: MIGRATE")
process()
elif x == "bson":
from migration.bson2json import json_tables
-
print("MODE: BSON")
json_tables()
@@ -97,5 +105,5 @@ if __name__ == "__main__":
host="0.0.0.0",
port=PORT,
proxy_headers=True,
- server_header=True,
+ server_header=True
)
diff --git a/services/following.py b/services/following.py
index a2be6af4..8410eb2d 100644
--- a/services/following.py
+++ b/services/following.py
@@ -18,7 +18,12 @@ class Following:
class FollowingManager:
lock = asyncio.Lock()
- data = {"author": [], "topic": [], "shout": [], "chat": []}
+ data = {
+ 'author': [],
+ 'topic': [],
+ 'shout': [],
+ 'chat': []
+ }
@staticmethod
async def register(kind, uid):
@@ -34,13 +39,13 @@ class FollowingManager:
async def push(kind, payload):
try:
async with FollowingManager.lock:
- if kind == "chat":
- for chat in FollowingManager["chat"]:
+ if kind == 'chat':
+ for chat in FollowingManager['chat']:
if payload.message["chatId"] == chat.uid:
chat.queue.put_nowait(payload)
else:
for entity in FollowingManager[kind]:
- if payload.shout["createdBy"] == entity.uid:
+ if payload.shout['createdBy'] == entity.uid:
entity.queue.put_nowait(payload)
except Exception as e:
print(Exception(e))
diff --git a/services/main.py b/services/main.py
index 6397a5e5..10301b86 100644
--- a/services/main.py
+++ b/services/main.py
@@ -1,13 +1,13 @@
-from base.orm import local_session
from services.search import SearchService
from services.stat.viewed import ViewedStorage
+from base.orm import local_session
async def storages_init():
with local_session() as session:
- print("[main] initialize SearchService")
+ print('[main] initialize SearchService')
await SearchService.init(session)
- print("[main] SearchService initialized")
- print("[main] initialize storages")
+ print('[main] SearchService initialized')
+ print('[main] initialize storages')
await ViewedStorage.init()
- print("[main] storages initialized")
+ print('[main] storages initialized')
diff --git a/services/notifications/notification_service.py b/services/notifications/notification_service.py
index ade98763..7e92aa95 100644
--- a/services/notifications/notification_service.py
+++ b/services/notifications/notification_service.py
@@ -1,17 +1,21 @@
-from base.orm import local_session
+import asyncio
+import json
from datetime import datetime, timezone
-from orm import Notification, Reaction, Shout, User
+
+from sqlalchemy import and_
+
+from base.orm import local_session
+from orm import Reaction, Shout, Notification, User
from orm.notification import NotificationType
from orm.reaction import ReactionKind
from services.notifications.sse import connection_manager
-from sqlalchemy import and_
-
-import asyncio
-import json
def shout_to_shout_data(shout):
- return {"title": shout.title, "slug": shout.slug}
+ return {
+ "title": shout.title,
+ "slug": shout.slug
+ }
def user_to_user_data(user):
@@ -19,14 +23,14 @@ def user_to_user_data(user):
"id": user.id,
"name": user.name,
"slug": user.slug,
- "userpic": user.userpic,
+ "userpic": user.userpic
}
def update_prev_notification(notification, user, reaction):
notification_data = json.loads(notification.data)
- notification_data["users"] = [u for u in notification_data["users"] if u["id"] != user.id]
+ notification_data["users"] = [u for u in notification_data["users"] if u['id'] != user.id]
notification_data["users"].append(user_to_user_data(user))
if notification_data["reactionIds"] is None:
@@ -53,45 +57,34 @@ class NewReactionNotificator:
if reaction.kind == ReactionKind.COMMENT:
parent_reaction = None
if reaction.replyTo:
- parent_reaction = (
- session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
- )
+ parent_reaction = session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
if parent_reaction.createdBy != reaction.createdBy:
- prev_new_reply_notification = (
- session.query(Notification)
- .where(
- and_(
- Notification.user == shout.createdBy,
- Notification.type == NotificationType.NEW_REPLY,
- Notification.shout == shout.id,
- Notification.reaction == parent_reaction.id,
- Notification.seen == False, # noqa: E712
- )
+ prev_new_reply_notification = session.query(Notification).where(
+ and_(
+ Notification.user == shout.createdBy,
+ Notification.type == NotificationType.NEW_REPLY,
+ Notification.shout == shout.id,
+ Notification.reaction == parent_reaction.id,
+ Notification.seen == False
)
- .first()
- )
+ ).first()
if prev_new_reply_notification:
update_prev_notification(prev_new_reply_notification, user, reaction)
else:
- reply_notification_data = json.dumps(
- {
- "shout": shout_to_shout_data(shout),
- "users": [user_to_user_data(user)],
- "reactionIds": [reaction.id],
- },
- ensure_ascii=False,
- )
+ reply_notification_data = json.dumps({
+ "shout": shout_to_shout_data(shout),
+ "users": [user_to_user_data(user)],
+ "reactionIds": [reaction.id]
+ }, ensure_ascii=False)
- reply_notification = Notification.create(
- **{
- "user": parent_reaction.createdBy,
- "type": NotificationType.NEW_REPLY,
- "shout": shout.id,
- "reaction": parent_reaction.id,
- "data": reply_notification_data,
- }
- )
+ reply_notification = Notification.create(**{
+ "user": parent_reaction.createdBy,
+ "type": NotificationType.NEW_REPLY,
+ "shout": shout.id,
+ "reaction": parent_reaction.id,
+ "data": reply_notification_data
+ })
session.add(reply_notification)
@@ -100,39 +93,30 @@ class NewReactionNotificator:
if reaction.createdBy != shout.createdBy and (
parent_reaction is None or parent_reaction.createdBy != shout.createdBy
):
- prev_new_comment_notification = (
- session.query(Notification)
- .where(
- and_(
- Notification.user == shout.createdBy,
- Notification.type == NotificationType.NEW_COMMENT,
- Notification.shout == shout.id,
- Notification.seen == False, # noqa: E712
- )
+ prev_new_comment_notification = session.query(Notification).where(
+ and_(
+ Notification.user == shout.createdBy,
+ Notification.type == NotificationType.NEW_COMMENT,
+ Notification.shout == shout.id,
+ Notification.seen == False
)
- .first()
- )
+ ).first()
if prev_new_comment_notification:
update_prev_notification(prev_new_comment_notification, user, reaction)
else:
- notification_data_string = json.dumps(
- {
- "shout": shout_to_shout_data(shout),
- "users": [user_to_user_data(user)],
- "reactionIds": [reaction.id],
- },
- ensure_ascii=False,
- )
+ notification_data_string = json.dumps({
+ "shout": shout_to_shout_data(shout),
+ "users": [user_to_user_data(user)],
+ "reactionIds": [reaction.id]
+ }, ensure_ascii=False)
- author_notification = Notification.create(
- **{
- "user": shout.createdBy,
- "type": NotificationType.NEW_COMMENT,
- "shout": shout.id,
- "data": notification_data_string,
- }
- )
+ author_notification = Notification.create(**{
+ "user": shout.createdBy,
+ "type": NotificationType.NEW_COMMENT,
+ "shout": shout.id,
+ "data": notification_data_string
+ })
session.add(author_notification)
@@ -158,7 +142,7 @@ class NotificationService:
try:
await notificator.run()
except Exception as e:
- print(f"[NotificationService.worker] error: {str(e)}")
+ print(f'[NotificationService.worker] error: {str(e)}')
notification_service = NotificationService()
diff --git a/services/notifications/sse.py b/services/notifications/sse.py
index 23352344..085dbde0 100644
--- a/services/notifications/sse.py
+++ b/services/notifications/sse.py
@@ -1,8 +1,8 @@
+import json
+
from sse_starlette.sse import EventSourceResponse
from starlette.requests import Request
-
import asyncio
-import json
class ConnectionManager:
@@ -28,7 +28,9 @@ class ConnectionManager:
return
for connection in self.connections_by_user_id[user_id]:
- data = {"type": "newNotifications"}
+ data = {
+ "type": "newNotifications"
+ }
data_string = json.dumps(data, ensure_ascii=False)
await connection.put(data_string)
diff --git a/services/search.py b/services/search.py
index ffcd32b5..834e5bf7 100644
--- a/services/search.py
+++ b/services/search.py
@@ -1,10 +1,9 @@
+import asyncio
+import json
from base.redis import redis
from orm.shout import Shout
from resolvers.zine.load import load_shouts_by
-import asyncio
-import json
-
class SearchService:
lock = asyncio.Lock()
@@ -13,7 +12,7 @@ class SearchService:
@staticmethod
async def init(session):
async with SearchService.lock:
- print("[search.service] did nothing")
+ print('[search.service] did nothing')
SearchService.cache = {}
@staticmethod
@@ -25,7 +24,7 @@ class SearchService:
"title": text,
"body": text,
"limit": limit,
- "offset": offset,
+ "offset": offset
}
payload = await load_shouts_by(None, None, options)
await redis.execute("SET", text, json.dumps(payload))
diff --git a/services/stat/viewed.py b/services/stat/viewed.py
index ce5070b2..905ade43 100644
--- a/services/stat/viewed.py
+++ b/services/stat/viewed.py
@@ -1,17 +1,18 @@
-from base.orm import local_session
-from datetime import datetime, timedelta, timezone
-from gql import Client, gql
-from gql.transport.aiohttp import AIOHTTPTransport
-from orm import Topic
-from orm.shout import Shout, ShoutTopic
+import asyncio
+import time
+from datetime import timedelta, timezone, datetime
from os import environ, path
from ssl import create_default_context
-import asyncio
-import time
+from gql import Client, gql
+from gql.transport.aiohttp import AIOHTTPTransport
+from sqlalchemy import func
-load_facts = gql(
- """
+from base.orm import local_session
+from orm import User, Topic
+from orm.shout import ShoutTopic, Shout
+
+load_facts = gql("""
query getDomains {
domains {
id
@@ -24,11 +25,9 @@ query getDomains {
}
}
}
-"""
-)
+""")
-load_pages = gql(
- """
+load_pages = gql("""
query getDomains {
domains {
title
@@ -42,9 +41,8 @@ query getDomains {
}
}
}
-"""
-)
-schema_str = open(path.dirname(__file__) + "/ackee.graphql").read()
+""")
+schema_str = open(path.dirname(__file__) + '/ackee.graphql').read()
token = environ.get("ACKEE_TOKEN", "")
@@ -54,8 +52,8 @@ def create_client(headers=None, schema=None):
transport=AIOHTTPTransport(
url="https://ackee.discours.io/api",
ssl=create_default_context(),
- headers=headers,
- ),
+ headers=headers
+ )
)
@@ -73,13 +71,13 @@ class ViewedStorage:
@staticmethod
async def init():
- """graphql client connection using permanent token"""
+ """ graphql client connection using permanent token """
self = ViewedStorage
async with self.lock:
if token:
- self.client = create_client(
- {"Authorization": "Bearer %s" % str(token)}, schema=schema_str
- )
+ self.client = create_client({
+ "Authorization": "Bearer %s" % str(token)
+ }, schema=schema_str)
print("[stat.viewed] * authorized permanentely by ackee.discours.io: %s" % token)
else:
print("[stat.viewed] * please set ACKEE_TOKEN")
@@ -87,7 +85,7 @@ class ViewedStorage:
@staticmethod
async def update_pages():
- """query all the pages from ackee sorted by views count"""
+ """ query all the pages from ackee sorted by views count """
print("[stat.viewed] ⎧ updating ackee pages data ---")
start = time.time()
self = ViewedStorage
@@ -98,7 +96,7 @@ class ViewedStorage:
try:
for page in self.pages:
p = page["value"].split("?")[0]
- slug = p.split("discours.io/")[-1]
+ slug = p.split('discours.io/')[-1]
shouts[slug] = page["count"]
for slug in shouts.keys():
await ViewedStorage.increment(slug, shouts[slug])
@@ -120,7 +118,7 @@ class ViewedStorage:
# unused yet
@staticmethod
async def get_shout(shout_slug):
- """getting shout views metric by slug"""
+ """ getting shout views metric by slug """
self = ViewedStorage
async with self.lock:
shout_views = self.by_shouts.get(shout_slug)
@@ -138,7 +136,7 @@ class ViewedStorage:
@staticmethod
async def get_topic(topic_slug):
- """getting topic views value summed"""
+ """ getting topic views value summed """
self = ViewedStorage
topic_views = 0
async with self.lock:
@@ -148,28 +146,24 @@ class ViewedStorage:
@staticmethod
def update_topics(session, shout_slug):
- """updates topics counters by shout slug"""
+ """ updates topics counters by shout slug """
self = ViewedStorage
- for [shout_topic, topic] in (
- session.query(ShoutTopic, Topic)
- .join(Topic)
- .join(Shout)
- .where(Shout.slug == shout_slug)
- .all()
- ):
+ for [shout_topic, topic] in session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(
+ Shout.slug == shout_slug
+ ).all():
if not self.by_topics.get(topic.slug):
self.by_topics[topic.slug] = {}
self.by_topics[topic.slug][shout_slug] = self.by_shouts[shout_slug]
@staticmethod
- async def increment(shout_slug, amount=1, viewer="ackee"):
- """the only way to change views counter"""
+ async def increment(shout_slug, amount=1, viewer='ackee'):
+ """ the only way to change views counter """
self = ViewedStorage
async with self.lock:
# TODO optimize, currenty we execute 1 DB transaction per shout
with local_session() as session:
shout = session.query(Shout).where(Shout.slug == shout_slug).one()
- if viewer == "old-discours":
+ if viewer == 'old-discours':
# this is needed for old db migration
if shout.viewsOld == amount:
print(f"viewsOld amount: {amount}")
@@ -191,7 +185,7 @@ class ViewedStorage:
@staticmethod
async def worker():
- """async task worker"""
+ """ async task worker """
failed = 0
self = ViewedStorage
if self.disabled:
@@ -211,10 +205,9 @@ class ViewedStorage:
if failed == 0:
when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
t = format(when.astimezone().isoformat())
- print(
- "[stat.viewed] ⎩ next update: %s"
- % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
- )
+ print("[stat.viewed] ⎩ next update: %s" % (
+ t.split("T")[0] + " " + t.split("T")[1].split(".")[0]
+ ))
await asyncio.sleep(self.period)
else:
await asyncio.sleep(10)
diff --git a/services/zine/gittask.py b/services/zine/gittask.py
index 6c6ce440..31e55025 100644
--- a/services/zine/gittask.py
+++ b/services/zine/gittask.py
@@ -1,8 +1,8 @@
-from pathlib import Path
-from settings import SHOUTS_REPO
-
import asyncio
import subprocess
+from pathlib import Path
+
+from settings import SHOUTS_REPO
class GitTask:
diff --git a/settings.py b/settings.py
index f3da9952..270b4551 100644
--- a/settings.py
+++ b/settings.py
@@ -3,9 +3,8 @@ from os import environ
PORT = 8080
DB_URL = (
- environ.get("DATABASE_URL")
- or environ.get("DB_URL")
- or "postgresql://postgres@localhost:5432/discoursio"
+ environ.get("DATABASE_URL") or environ.get("DB_URL") or
+ "postgresql://postgres@localhost:5432/discoursio"
)
JWT_ALGORITHM = "HS256"
JWT_SECRET_KEY = environ.get("JWT_SECRET_KEY") or "8f1bd7696ffb482d8486dfbc6e7d16dd-secret-key"
@@ -31,4 +30,4 @@ SENTRY_DSN = environ.get("SENTRY_DSN")
SESSION_SECRET_KEY = environ.get("SESSION_SECRET_KEY") or "!secret"
# for local development
-DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
+DEV_SERVER_PID_FILE_NAME = 'dev-server.pid'
diff --git a/setup.cfg b/setup.cfg
old mode 100644
new mode 100755
index e3db2ef9..588918a1
--- a/setup.cfg
+++ b/setup.cfg
@@ -9,16 +9,15 @@ force_alphabetical_sort = false
[tool:brunette]
# https://github.com/odwyersoftware/brunette
-line-length = 100
+line-length = 120
single-quotes = false
[flake8]
# https://github.com/PyCQA/flake8
exclude = .git,__pycache__,.mypy_cache,.vercel
-max-line-length = 100
-max-complexity = 10
+max-line-length = 120
+max-complexity = 15
select = B,C,E,F,W,T4,B9
-# FIXME
# E203: Whitespace before ':'
# E266: Too many leading '#' for block comment
# E501: Line too long (82 > 79 characters)
diff --git a/setup.cfg.bak b/setup.cfg.bak
deleted file mode 100644
index 588918a1..00000000
--- a/setup.cfg.bak
+++ /dev/null
@@ -1,39 +0,0 @@
-[isort]
-# https://github.com/PyCQA/isort
-line_length = 120
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
-force_alphabetical_sort = false
-
-[tool:brunette]
-# https://github.com/odwyersoftware/brunette
-line-length = 120
-single-quotes = false
-
-[flake8]
-# https://github.com/PyCQA/flake8
-exclude = .git,__pycache__,.mypy_cache,.vercel
-max-line-length = 120
-max-complexity = 15
-select = B,C,E,F,W,T4,B9
-# E203: Whitespace before ':'
-# E266: Too many leading '#' for block comment
-# E501: Line too long (82 > 79 characters)
-# E722: Do not use bare except, specify exception instead
-# W503: Line break occurred before a binary operator
-# F403: 'from module import *' used; unable to detect undefined names
-# C901: Function is too complex
-ignore = E203,E266,E501,E722,W503,F403,C901
-
-[mypy]
-# https://github.com/python/mypy
-ignore_missing_imports = true
-warn_return_any = false
-warn_unused_configs = true
-disallow_untyped_calls = true
-disallow_untyped_defs = true
-disallow_incomplete_defs = true
-[mypy-api.*]
-ignore_errors = true
diff --git a/validations/auth.py b/validations/auth.py
index 59c49bd4..216d7dcb 100644
--- a/validations/auth.py
+++ b/validations/auth.py
@@ -1,5 +1,5 @@
-from pydantic import BaseModel
from typing import Optional, Text
+from pydantic import BaseModel
class AuthInput(BaseModel):
diff --git a/validations/inbox.py b/validations/inbox.py
index d864ed67..d03cca05 100644
--- a/validations/inbox.py
+++ b/validations/inbox.py
@@ -1,5 +1,5 @@
+from typing import Optional, Text, List
from pydantic import BaseModel
-from typing import List, Optional, Text
class Message(BaseModel):
From 441bcc1e90f8c84a1da07fd1fcfb46179b973c59 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Mon, 30 Oct 2023 22:00:55 +0100
Subject: [PATCH 09/27] configured isort, black, flake8
---
.flake8 | 6 -
.github/workflows/checks.yml | 16 +
.pre-commit-config.yaml | 24 +-
Procfile | 1 -
README.md | 1 -
ai/preprocess.py | 75 --
alembic/env.py | 10 +-
alembic/versions/fe943b098418_init_alembic.py | 8 +-
auth/authenticate.py | 50 +-
auth/credentials.py | 12 +-
auth/email.py | 23 +-
auth/identity.py | 29 +-
auth/jwtcodec.py | 22 +-
auth/oauth.py | 3 +-
auth/tokenstorage.py | 6 +-
base/exceptions.py | 2 +-
base/orm.py | 17 +-
base/redis.py | 4 +-
generate_gql_types.sh | 1 +
lint.sh | 18 +-
main.py | 26 +-
migrate.sh | 1 -
migration/__init__.py | 57 +-
migration/bson2json.py | 7 +-
migration/export.py | 38 +-
migration/extract.py | 358 ++---
migration/html2text/__init__.py | 73 +-
migration/html2text/cli.py | 9 +-
migration/html2text/utils.py | 33 +-
migration/tables/__init__.py | 1 -
migration/tables/comments.py | 89 +-
migration/tables/content_items.py | 112 +-
migration/tables/remarks.py | 73 +-
migration/tables/topics.py | 3 +-
migration/tables/users.py | 35 +-
orm/__init__.py | 4 +-
orm/collection.py | 2 +-
orm/community.py | 23 +-
orm/notification.py | 11 +-
orm/rbac.py | 49 +-
orm/reaction.py | 18 +-
orm/shout.py | 45 +-
orm/topic.py | 14 +-
orm/user.py | 31 +-
pyproject.toml | 2 +
requirements-dev.txt | 12 +-
requirements.txt | 59 +-
resetdb.sh | 1 -
resolvers/__init__.py | 89 +-
resolvers/auth.py | 51 +-
resolvers/create/editor.py | 80 +-
resolvers/create/migrate.py | 11 -
resolvers/inbox/chats.py | 49 +-
resolvers/inbox/load.py | 98 +-
resolvers/inbox/messages.py | 61 +-
resolvers/inbox/search.py | 61 +-
resolvers/notifications.py | 56 +-
resolvers/upload.py | 40 +-
resolvers/zine/following.py | 47 +-
resolvers/zine/load.py | 145 +--
resolvers/zine/profile.py | 84 +-
resolvers/zine/reactions.py | 204 ++-
resolvers/zine/topics.py | 41 +-
schema_types.py | 1157 +++++++++++++++++
server.py | 83 +-
services/following.py | 13 +-
services/main.py | 10 +-
.../notifications/notification_service.py | 116 +-
services/notifications/sse.py | 6 +-
services/search.py | 17 +-
services/stat/viewed.py | 70 +-
settings.py | 7 +-
setup.cfg | 35 +-
validations/auth.py | 1 +
validations/inbox.py | 4 +-
75 files changed, 2420 insertions(+), 1730 deletions(-)
delete mode 100644 .flake8
create mode 100644 .github/workflows/checks.yml
delete mode 100644 ai/preprocess.py
create mode 100755 generate_gql_types.sh
delete mode 100644 migration/tables/__init__.py
create mode 100644 pyproject.toml
delete mode 100644 resolvers/create/migrate.py
create mode 100644 schema_types.py
diff --git a/.flake8 b/.flake8
deleted file mode 100644
index e82de95a..00000000
--- a/.flake8
+++ /dev/null
@@ -1,6 +0,0 @@
-[flake8]
-ignore = E203,W504,W191,W503
-exclude = .git,__pycache__,orm/rbac.py
-max-complexity = 10
-max-line-length = 108
-indent-string = ' '
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
new file mode 100644
index 00000000..c80dd279
--- /dev/null
+++ b/.github/workflows/checks.yml
@@ -0,0 +1,16 @@
+name: Checks
+on: [pull_request]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ name: Checks
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: 3.10
+ - run: pip install --upgrade pip
+ - run: pip install -r requirements.txt
+ - run: pip install -r requirements-dev.txt
+ - run: check.sh
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index af489f3a..09ad4e40 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,11 +6,11 @@ exclude: |
)
default_language_version:
- python: python3.8
+ python: python3.10
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v3.2.0
+ rev: v4.5.0
hooks:
- id: check-added-large-files
- id: check-case-conflict
@@ -21,24 +21,24 @@ repos:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
+ - id: requirements-txt-fixer
- repo: https://github.com/timothycrosley/isort
- rev: 5.5.3
+ rev: 5.12.0
hooks:
- id: isort
- repo: https://github.com/ambv/black
- rev: 20.8b1
+ rev: 23.10.1
hooks:
- id: black
- args:
- - --line-length=100
- - --skip-string-normalization
- - repo: https://gitlab.com/pycqa/flake8
- rev: 3.8.3
+ - repo: https://github.com/PyCQA/flake8
+ rev: 6.1.0
hooks:
- id: flake8
- args:
- - --max-line-length=100
- - --disable=protected-access
+
+# - repo: https://github.com/python/mypy
+# rev: v1.6.1
+# hooks:
+# - id: mypy
diff --git a/Procfile b/Procfile
index c5c1bfa8..ac9d762f 100644
--- a/Procfile
+++ b/Procfile
@@ -1,2 +1 @@
web: python server.py
-
diff --git a/README.md b/README.md
index 1a1ee0a4..7081fbca 100644
--- a/README.md
+++ b/README.md
@@ -42,4 +42,3 @@ Put the header 'Authorization' with token from signIn query or registerUser muta
# How to debug Ackee
Set ACKEE_TOKEN var
-
diff --git a/ai/preprocess.py b/ai/preprocess.py
deleted file mode 100644
index afd8dbd8..00000000
--- a/ai/preprocess.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import re
-import nltk
-from bs4 import BeautifulSoup
-from nltk.corpus import stopwords
-from pymystem3 import Mystem
-from string import punctuation
-from transformers import BertTokenizer
-
-nltk.download("stopwords")
-
-
-def get_clear_text(text):
- soup = BeautifulSoup(text, 'html.parser')
-
- # extract the plain text from the HTML document without tags
- clear_text = ''
- for tag in soup.find_all():
- clear_text += tag.string or ''
-
- clear_text = re.sub(pattern='[\u202F\u00A0\n]+', repl=' ', string=clear_text)
-
- # only words
- clear_text = re.sub(pattern='[^A-ZА-ЯЁ -]', repl='', string=clear_text, flags=re.IGNORECASE)
-
- clear_text = re.sub(pattern='\s+', repl=' ', string=clear_text)
-
- clear_text = clear_text.lower()
-
- mystem = Mystem()
- russian_stopwords = stopwords.words("russian")
-
- tokens = mystem.lemmatize(clear_text)
- tokens = [token for token in tokens if token not in russian_stopwords \
- and token != " " \
- and token.strip() not in punctuation]
-
- clear_text = " ".join(tokens)
-
- return clear_text
-
-
-# if __name__ == '__main__':
-#
-# # initialize the tokenizer with the pre-trained BERT model and vocabulary
-# tokenizer = BertTokenizer.from_pretrained('bert-base-multilingual-cased')
-#
-# # split each text into smaller segments of maximum length 512
-# max_length = 512
-# segmented_texts = []
-# for text in [clear_text1, clear_text2]:
-# segmented_text = []
-# for i in range(0, len(text), max_length):
-# segment = text[i:i+max_length]
-# segmented_text.append(segment)
-# segmented_texts.append(segmented_text)
-#
-# # tokenize each segment using the BERT tokenizer
-# tokenized_texts = []
-# for segmented_text in segmented_texts:
-# tokenized_text = []
-# for segment in segmented_text:
-# segment_tokens = tokenizer.tokenize(segment)
-# segment_tokens = ['[CLS]'] + segment_tokens + ['[SEP]']
-# tokenized_text.append(segment_tokens)
-# tokenized_texts.append(tokenized_text)
-#
-# input_ids = []
-# for tokenized_text in tokenized_texts:
-# input_id = []
-# for segment_tokens in tokenized_text:
-# segment_id = tokenizer.convert_tokens_to_ids(segment_tokens)
-# input_id.append(segment_id)
-# input_ids.append(input_id)
-#
-# print(input_ids)
diff --git a/alembic/env.py b/alembic/env.py
index c6d69a97..3256b308 100644
--- a/alembic/env.py
+++ b/alembic/env.py
@@ -1,10 +1,9 @@
from logging.config import fileConfig
-from sqlalchemy import engine_from_config
-from sqlalchemy import pool
+from sqlalchemy import engine_from_config, pool
from alembic import context
-
+from base.orm import Base
from settings import DB_URL
# this is the Alembic Config object, which provides
@@ -19,7 +18,6 @@ config.set_section_option(config.config_ini_section, "DB_URL", DB_URL)
if config.config_file_name is not None:
fileConfig(config.config_file_name)
-from base.orm import Base
target_metadata = [Base.metadata]
# other values from the config, defined by the needs of env.py,
@@ -66,9 +64,7 @@ def run_migrations_online() -> None:
)
with connectable.connect() as connection:
- context.configure(
- connection=connection, target_metadata=target_metadata
- )
+ context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
diff --git a/alembic/versions/fe943b098418_init_alembic.py b/alembic/versions/fe943b098418_init_alembic.py
index 4ec6d519..52796fea 100644
--- a/alembic/versions/fe943b098418_init_alembic.py
+++ b/alembic/versions/fe943b098418_init_alembic.py
@@ -1,18 +1,18 @@
"""init alembic
Revision ID: fe943b098418
-Revises:
+Revises:
Create Date: 2023-08-19 01:37:57.031933
"""
from typing import Sequence, Union
-from alembic import op
-import sqlalchemy as sa
+# import sqlalchemy as sa
+# from alembic import op
# revision identifiers, used by Alembic.
-revision: str = 'fe943b098418'
+revision: str = "fe943b098418"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
diff --git a/auth/authenticate.py b/auth/authenticate.py
index be4db2d2..aa5b308f 100644
--- a/auth/authenticate.py
+++ b/auth/authenticate.py
@@ -2,75 +2,71 @@ from functools import wraps
from typing import Optional, Tuple
from graphql.type import GraphQLResolveInfo
-from sqlalchemy.orm import joinedload, exc
+from sqlalchemy.orm import exc, joinedload
from starlette.authentication import AuthenticationBackend
from starlette.requests import HTTPConnection
from auth.credentials import AuthCredentials, AuthUser
-from base.orm import local_session
-from orm.user import User, Role
-
-from settings import SESSION_TOKEN_HEADER
from auth.tokenstorage import SessionToken
from base.exceptions import OperationNotAllowed
+from base.orm import local_session
+from orm.user import Role, User
+from settings import SESSION_TOKEN_HEADER
class JWTAuthenticate(AuthenticationBackend):
async def authenticate(
self, request: HTTPConnection
) -> Optional[Tuple[AuthCredentials, AuthUser]]:
-
if SESSION_TOKEN_HEADER not in request.headers:
- return AuthCredentials(scopes={}), AuthUser(user_id=None, username='')
+ return AuthCredentials(scopes={}), AuthUser(user_id=None, username="")
token = request.headers.get(SESSION_TOKEN_HEADER)
if not token:
print("[auth.authenticate] no token in header %s" % SESSION_TOKEN_HEADER)
return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(
- user_id=None, username=''
+ user_id=None, username=""
)
- if len(token.split('.')) > 1:
+ if len(token.split(".")) > 1:
payload = await SessionToken.verify(token)
with local_session() as session:
try:
user = (
- session.query(User).options(
+ session.query(User)
+ .options(
joinedload(User.roles).options(joinedload(Role.permissions)),
- joinedload(User.ratings)
- ).filter(
- User.id == payload.user_id
- ).one()
+ joinedload(User.ratings),
+ )
+ .filter(User.id == payload.user_id)
+ .one()
)
scopes = {} # TODO: integrate await user.get_permission()
return (
- AuthCredentials(
- user_id=payload.user_id,
- scopes=scopes,
- logged_in=True
- ),
- AuthUser(user_id=user.id, username=''),
+ AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
+ AuthUser(user_id=user.id, username=""),
)
except exc.NoResultFound:
pass
- return AuthCredentials(scopes={}, error_message=str('Invalid token')), AuthUser(user_id=None, username='')
+ return AuthCredentials(scopes={}, error_message=str("Invalid token")), AuthUser(
+ user_id=None, username=""
+ )
def login_required(func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
- # print('[auth.authenticate] login required for %r with info %r' % (func, info)) # debug only
+ # debug only
+ # print('[auth.authenticate] login required for %r with info %r' % (func, info))
auth: AuthCredentials = info.context["request"].auth
# print(auth)
if not auth or not auth.logged_in:
# raise Unauthorized(auth.error_message or "Please login")
- return {
- "error": "Please login first"
- }
+ return {"error": "Please login first"}
return await func(parent, info, *args, **kwargs)
return wrap
@@ -79,7 +75,9 @@ def login_required(func):
def permission_required(resource, operation, func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
- print('[auth.authenticate] permission_required for %r with info %r' % (func, info)) # debug only
+ print(
+ "[auth.authenticate] permission_required for %r with info %r" % (func, info)
+ ) # debug only
auth: AuthCredentials = info.context["request"].auth
if not auth.logged_in:
raise OperationNotAllowed(auth.error_message or "Please login")
diff --git a/auth/credentials.py b/auth/credentials.py
index 9045b7a4..3d7d5a36 100644
--- a/auth/credentials.py
+++ b/auth/credentials.py
@@ -23,13 +23,11 @@ class AuthCredentials(BaseModel):
async def permissions(self) -> List[Permission]:
if self.user_id is None:
# raise Unauthorized("Please login first")
- return {
- "error": "Please login first"
- }
+ return {"error": "Please login first"}
else:
# TODO: implement permissions logix
print(self.user_id)
- return NotImplemented()
+ return NotImplemented
class AuthUser(BaseModel):
@@ -40,6 +38,6 @@ class AuthUser(BaseModel):
def is_authenticated(self) -> bool:
return self.user_id is not None
- @property
- def display_id(self) -> int:
- return self.user_id
+ # @property
+ # def display_id(self) -> int:
+ # return self.user_id
diff --git a/auth/email.py b/auth/email.py
index 7ca5d9bf..a42cf1f7 100644
--- a/auth/email.py
+++ b/auth/email.py
@@ -2,19 +2,16 @@ import requests
from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN
-api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or 'discours.io')
-noreply = "discours.io " % (MAILGUN_DOMAIN or 'discours.io')
-lang_subject = {
- "ru": "Подтверждение почты",
- "en": "Confirm email"
-}
+api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or "discours.io")
+noreply = "discours.io " % (MAILGUN_DOMAIN or "discours.io")
+lang_subject = {"ru": "Подтверждение почты", "en": "Confirm email"}
async def send_auth_email(user, token, lang="ru", template="email_confirmation"):
try:
to = "%s <%s>" % (user.name, user.email)
- if lang not in ['ru', 'en']:
- lang = 'ru'
+ if lang not in ["ru", "en"]:
+ lang = "ru"
subject = lang_subject.get(lang, lang_subject["en"])
template = template + "_" + lang
payload = {
@@ -22,16 +19,12 @@ async def send_auth_email(user, token, lang="ru", template="email_confirmation")
"to": to,
"subject": subject,
"template": template,
- "h:X-Mailgun-Variables": "{ \"token\": \"%s\" }" % token
+ "h:X-Mailgun-Variables": '{ "token": "%s" }' % token,
}
- print('[auth.email] payload: %r' % payload)
+ print("[auth.email] payload: %r" % payload)
# debug
# print('http://localhost:3000/?modal=auth&mode=confirm-email&token=%s' % token)
- response = requests.post(
- api_url,
- auth=("api", MAILGUN_API_KEY),
- data=payload
- )
+ response = requests.post(api_url, auth=("api", MAILGUN_API_KEY), data=payload)
response.raise_for_status()
except Exception as e:
print(e)
diff --git a/auth/identity.py b/auth/identity.py
index e4b78040..7f704eff 100644
--- a/auth/identity.py
+++ b/auth/identity.py
@@ -7,6 +7,7 @@ from sqlalchemy import or_
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
+
# from base.exceptions import InvalidPassword, InvalidToken
from base.orm import local_session
from orm import User
@@ -34,7 +35,7 @@ class Password:
Verify that password hash is equal to specified hash. Hash format:
$2a$10$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm
- \__/\/ \____________________/\_____________________________/
+ \__/\/ \____________________/\_____________________________/ # noqa: W605
| | Salt Hash
| Cost
Version
@@ -57,14 +58,10 @@ class Identity:
user = User(**orm_user.dict())
if not user.password:
# raise InvalidPassword("User password is empty")
- return {
- "error": "User password is empty"
- }
+ return {"error": "User password is empty"}
if not Password.verify(password, user.password):
# raise InvalidPassword("Wrong user password")
- return {
- "error": "Wrong user password"
- }
+ return {"error": "Wrong user password"}
return user
@staticmethod
@@ -87,30 +84,22 @@ class Identity:
@staticmethod
async def onetime(token: str) -> User:
try:
- print('[auth.identity] using one time token')
+ print("[auth.identity] using one time token")
payload = JWTCodec.decode(token)
if not await TokenStorage.exist(f"{payload.user_id}-{payload.username}-{token}"):
# raise InvalidToken("Login token has expired, please login again")
- return {
- "error": "Token has expired"
- }
+ return {"error": "Token has expired"}
except ExpiredSignatureError:
# raise InvalidToken("Login token has expired, please try again")
- return {
- "error": "Token has expired"
- }
+ return {"error": "Token has expired"}
except DecodeError:
# raise InvalidToken("token format error") from e
- return {
- "error": "Token format error"
- }
+ return {"error": "Token format error"}
with local_session() as session:
user = session.query(User).filter_by(id=payload.user_id).first()
if not user:
# raise Exception("user not exist")
- return {
- "error": "User does not exist"
- }
+ return {"error": "User does not exist"}
if not user.emailConfirmed:
user.emailConfirmed = True
session.commit()
diff --git a/auth/jwtcodec.py b/auth/jwtcodec.py
index ac561adb..d03451f7 100644
--- a/auth/jwtcodec.py
+++ b/auth/jwtcodec.py
@@ -1,8 +1,10 @@
from datetime import datetime, timezone
+
import jwt
+
from base.exceptions import ExpiredToken, InvalidToken
-from validations.auth import TokenPayload, AuthInput
from settings import JWT_ALGORITHM, JWT_SECRET_KEY
+from validations.auth import AuthInput, TokenPayload
class JWTCodec:
@@ -13,12 +15,12 @@ class JWTCodec:
"username": user.email or user.phone,
"exp": exp,
"iat": datetime.now(tz=timezone.utc),
- "iss": "discours"
+ "iss": "discours",
}
try:
return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
except Exception as e:
- print('[auth.jwtcodec] JWT encode error %r' % e)
+ print("[auth.jwtcodec] JWT encode error %r" % e)
@staticmethod
def decode(token: str, verify_exp: bool = True) -> TokenPayload:
@@ -33,18 +35,18 @@ class JWTCodec:
# "verify_signature": False
},
algorithms=[JWT_ALGORITHM],
- issuer="discours"
+ issuer="discours",
)
r = TokenPayload(**payload)
# print('[auth.jwtcodec] debug token %r' % r)
return r
except jwt.InvalidIssuedAtError:
- print('[auth.jwtcodec] invalid issued at: %r' % payload)
- raise ExpiredToken('check token issued time')
+ print("[auth.jwtcodec] invalid issued at: %r" % payload)
+ raise ExpiredToken("check token issued time")
except jwt.ExpiredSignatureError:
- print('[auth.jwtcodec] expired signature %r' % payload)
- raise ExpiredToken('check token lifetime')
+ print("[auth.jwtcodec] expired signature %r" % payload)
+ raise ExpiredToken("check token lifetime")
except jwt.InvalidTokenError:
- raise InvalidToken('token is not valid')
+ raise InvalidToken("token is not valid")
except jwt.InvalidSignatureError:
- raise InvalidToken('token is not valid')
+ raise InvalidToken("token is not valid")
diff --git a/auth/oauth.py b/auth/oauth.py
index 54b5f11a..02f56ff5 100644
--- a/auth/oauth.py
+++ b/auth/oauth.py
@@ -1,8 +1,9 @@
from authlib.integrations.starlette_client import OAuth
from starlette.responses import RedirectResponse
+
from auth.identity import Identity
from auth.tokenstorage import TokenStorage
-from settings import OAUTH_CLIENTS, FRONTEND_URL
+from settings import FRONTEND_URL, OAUTH_CLIENTS
oauth = OAuth()
diff --git a/auth/tokenstorage.py b/auth/tokenstorage.py
index c61aa848..8e5fc20d 100644
--- a/auth/tokenstorage.py
+++ b/auth/tokenstorage.py
@@ -1,9 +1,9 @@
from datetime import datetime, timedelta, timezone
from auth.jwtcodec import JWTCodec
-from validations.auth import AuthInput
from base.redis import redis
-from settings import SESSION_TOKEN_LIFE_SPAN, ONETIME_TOKEN_LIFE_SPAN
+from settings import ONETIME_TOKEN_LIFE_SPAN, SESSION_TOKEN_LIFE_SPAN
+from validations.auth import AuthInput
async def save(token_key, life_span, auto_delete=True):
@@ -35,7 +35,7 @@ class SessionToken:
class TokenStorage:
@staticmethod
async def get(token_key):
- print('[tokenstorage.get] ' + token_key)
+ print("[tokenstorage.get] " + token_key)
# 2041-user@domain.zn-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoyMDQxLCJ1c2VybmFtZSI6ImFudG9uLnJld2luK3Rlc3QtbG9hZGNoYXRAZ21haWwuY29tIiwiZXhwIjoxNjcxNzgwNjE2LCJpYXQiOjE2NjkxODg2MTYsImlzcyI6ImRpc2NvdXJzIn0.Nml4oV6iMjMmc6xwM7lTKEZJKBXvJFEIZ-Up1C1rITQ
return await redis.execute("GET", token_key)
diff --git a/base/exceptions.py b/base/exceptions.py
index 1f3344e7..2cf7bdeb 100644
--- a/base/exceptions.py
+++ b/base/exceptions.py
@@ -1,8 +1,8 @@
from graphql.error import GraphQLError
-
# TODO: remove traceback from logs for defined exceptions
+
class BaseHttpException(GraphQLError):
code = 500
message = "500 Server error"
diff --git a/base/orm.py b/base/orm.py
index 8d2e65ad..6f1e2ce4 100644
--- a/base/orm.py
+++ b/base/orm.py
@@ -1,15 +1,13 @@
-from typing import TypeVar, Any, Dict, Generic, Callable
+from typing import Any, Callable, Dict, Generic, TypeVar
-from sqlalchemy import create_engine, Column, Integer
+from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
from settings import DB_URL
-engine = create_engine(
- DB_URL, echo=False, pool_size=10, max_overflow=20
-)
+engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
T = TypeVar("T")
@@ -20,7 +18,10 @@ def local_session():
return Session(bind=engine, expire_on_commit=False)
-class Base(declarative_base()):
+DeclarativeBase = declarative_base() # type: Any
+
+
+class Base(DeclarativeBase):
__table__: Table
__tablename__: str
__new__: Callable
@@ -29,7 +30,7 @@ class Base(declarative_base()):
__abstract__ = True
__table_args__ = {"extend_existing": True}
- id = Column(Integer, primary_key=True)
+ id: Column | None = Column(Integer, primary_key=True)
def __init_subclass__(cls, **kwargs):
REGISTRY[cls.__name__] = cls
@@ -47,7 +48,7 @@ class Base(declarative_base()):
def update(self, input):
column_names = self.__table__.columns.keys()
- for (name, value) in input.items():
+ for name, value in input.items():
if name in column_names:
setattr(self, name, value)
diff --git a/base/redis.py b/base/redis.py
index 52a49caa..d5d4babd 100644
--- a/base/redis.py
+++ b/base/redis.py
@@ -1,5 +1,7 @@
-from aioredis import from_url
from asyncio import sleep
+
+from aioredis import from_url
+
from settings import REDIS_URL
diff --git a/generate_gql_types.sh b/generate_gql_types.sh
new file mode 100755
index 00000000..1a5c2d3f
--- /dev/null
+++ b/generate_gql_types.sh
@@ -0,0 +1 @@
+python -m gql_schema_codegen -p ./schema.graphql -t ./schema_types.py
diff --git a/lint.sh b/lint.sh
index 69fb32f7..f14e43b1 100755
--- a/lint.sh
+++ b/lint.sh
@@ -1,16 +1,10 @@
#!/usr/bin/env bash
-set -e
-
-find . -name "*.py[co]" -o -name __pycache__ -exec rm -rf {} +
-#rm -rf .mypy_cache
echo "> isort"
-isort --gitignore --settings-file=setup.cfg .
-echo "> brunette"
-brunette --config=setup.cfg .
+isort .
+echo "> black"
+black .
echo "> flake8"
-flake8 --config=setup.cfg .
-echo "> mypy"
-mypy --config-file=setup.cfg .
-echo "> prettyjson"
-python3 -m scripts.prettyjson
+flake8 .
+# echo "> mypy"
+# mypy .
diff --git a/main.py b/main.py
index 6bb17a86..eb21b15c 100644
--- a/main.py
+++ b/main.py
@@ -2,6 +2,7 @@ import asyncio
import os
from importlib import import_module
from os.path import exists
+
from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
@@ -9,23 +10,24 @@ from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route
-from orm import init_tables
from auth.authenticate import JWTAuthenticate
-from auth.oauth import oauth_login, oauth_authorize
+from auth.oauth import oauth_authorize, oauth_login
from base.redis import redis
from base.resolvers import resolvers
+from orm import init_tables
from resolvers.auth import confirm_email_handler
from resolvers.upload import upload_handler
from services.main import storages_init
from services.notifications.notification_service import notification_service
+from services.notifications.sse import sse_subscribe_handler
from services.stat.viewed import ViewedStorage
+
# from services.zine.gittask import GitTask
from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY
-from services.notifications.sse import sse_subscribe_handler
import_module("resolvers")
-schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers) # type: ignore
+schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers)
middleware = [
Middleware(AuthenticationMiddleware, backend=JWTAuthenticate()),
@@ -46,9 +48,10 @@ async def start_up():
try:
import sentry_sdk
+
sentry_sdk.init(SENTRY_DSN)
except Exception as e:
- print('[sentry] init error')
+ print("[sentry] init error")
print(e)
@@ -57,7 +60,7 @@ async def dev_start_up():
await redis.connect()
return
else:
- with open(DEV_SERVER_PID_FILE_NAME, 'w', encoding='utf-8') as f:
+ with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
f.write(str(os.getpid()))
await start_up()
@@ -72,7 +75,7 @@ routes = [
Route("/oauth/{provider}", endpoint=oauth_login),
Route("/oauth-authorize", endpoint=oauth_authorize),
Route("/confirm/{token}", endpoint=confirm_email_handler),
- Route("/upload", endpoint=upload_handler, methods=['POST']),
+ Route("/upload", endpoint=upload_handler, methods=["POST"]),
Route("/subscribe/{user_id}", endpoint=sse_subscribe_handler),
]
@@ -82,9 +85,7 @@ app = Starlette(
middleware=middleware,
routes=routes,
)
-app.mount("/", GraphQL(
- schema
-))
+app.mount("/", GraphQL(schema))
dev_app = Starlette(
debug=True,
@@ -93,7 +94,4 @@ dev_app = Starlette(
middleware=middleware,
routes=routes,
)
-dev_app.mount("/", GraphQL(
- schema,
- debug=True
-))
+dev_app.mount("/", GraphQL(schema, debug=True))
diff --git a/migrate.sh b/migrate.sh
index 2c1189da..f63aac19 100644
--- a/migrate.sh
+++ b/migrate.sh
@@ -16,4 +16,3 @@ echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'
-
diff --git a/migration/__init__.py b/migration/__init__.py
index 468fa886..08d1c8ff 100644
--- a/migration/__init__.py
+++ b/migration/__init__.py
@@ -12,10 +12,12 @@ from migration.tables.comments import migrate as migrateComment
from migration.tables.comments import migrate_2stage as migrateComment_2stage
from migration.tables.content_items import get_shout_slug
from migration.tables.content_items import migrate as migrateShout
-from migration.tables.remarks import migrate as migrateRemark
+
+# from migration.tables.remarks import migrate as migrateRemark
from migration.tables.topics import migrate as migrateTopic
-from migration.tables.users import migrate as migrateUser, post_migrate as users_post_migrate
+from migration.tables.users import migrate as migrateUser
from migration.tables.users import migrate_2stage as migrateUser_2stage
+from migration.tables.users import post_migrate as users_post_migrate
from orm import init_tables
from orm.reaction import Reaction
@@ -63,16 +65,8 @@ async def topics_handle(storage):
del storage["topics"]["by_slug"][oldslug]
storage["topics"]["by_oid"][oid] = storage["topics"]["by_slug"][newslug]
print("[migration] " + str(counter) + " topics migrated")
- print(
- "[migration] "
- + str(len(storage["topics"]["by_oid"].values()))
- + " topics by oid"
- )
- print(
- "[migration] "
- + str(len(storage["topics"]["by_slug"].values()))
- + " topics by slug"
- )
+ print("[migration] " + str(len(storage["topics"]["by_oid"].values())) + " topics by oid")
+ print("[migration] " + str(len(storage["topics"]["by_slug"].values())) + " topics by slug")
async def shouts_handle(storage, args):
@@ -117,9 +111,10 @@ async def shouts_handle(storage, args):
# print main counter
counter += 1
- print('[migration] shouts_handle %d: %s @%s' % (
- (counter + 1), shout_dict["slug"], author["slug"]
- ))
+ print(
+ "[migration] shouts_handle %d: %s @%s"
+ % ((counter + 1), shout_dict["slug"], author["slug"])
+ )
b = bs4.BeautifulSoup(shout_dict["body"], "html.parser")
texts = [shout_dict["title"].lower().replace(r"[^а-яА-Яa-zA-Z]", "")]
@@ -138,13 +133,13 @@ async def shouts_handle(storage, args):
print("[migration] " + str(anonymous_author) + " authored by @anonymous")
-async def remarks_handle(storage):
- print("[migration] comments")
- c = 0
- for entry_remark in storage["remarks"]["data"]:
- remark = await migrateRemark(entry_remark, storage)
- c += 1
- print("[migration] " + str(c) + " remarks migrated")
+# async def remarks_handle(storage):
+# print("[migration] comments")
+# c = 0
+# for entry_remark in storage["remarks"]["data"]:
+# remark = await migrateRemark(entry_remark, storage)
+# c += 1
+# print("[migration] " + str(c) + " remarks migrated")
async def comments_handle(storage):
@@ -155,9 +150,9 @@ async def comments_handle(storage):
for oldcomment in storage["reactions"]["data"]:
if not oldcomment.get("deleted"):
reaction = await migrateComment(oldcomment, storage)
- if type(reaction) == str:
+ if isinstance(reaction, str):
missed_shouts[reaction] = oldcomment
- elif type(reaction) == Reaction:
+ elif isinstance(reaction, Reaction):
reaction = reaction.dict()
rid = reaction["id"]
oid = reaction["oid"]
@@ -214,9 +209,7 @@ def data_load():
tags_data = json.loads(open("migration/data/tags.json").read())
storage["topics"]["tags"] = tags_data
print("[migration.load] " + str(len(tags_data)) + " tags ")
- cats_data = json.loads(
- open("migration/data/content_item_categories.json").read()
- )
+ cats_data = json.loads(open("migration/data/content_item_categories.json").read())
storage["topics"]["cats"] = cats_data
print("[migration.load] " + str(len(cats_data)) + " cats ")
comments_data = json.loads(open("migration/data/comments.json").read())
@@ -235,11 +228,7 @@ def data_load():
storage["users"]["by_oid"][x["_id"]] = x
# storage['users']['by_slug'][x['slug']] = x
# no user.slug yet
- print(
- "[migration.load] "
- + str(len(storage["users"]["by_oid"].keys()))
- + " users by oid"
- )
+ print("[migration.load] " + str(len(storage["users"]["by_oid"].keys())) + " users by oid")
for x in tags_data:
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
@@ -247,9 +236,7 @@ def data_load():
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
print(
- "[migration.load] "
- + str(len(storage["topics"]["by_slug"].keys()))
- + " topics by slug"
+ "[migration.load] " + str(len(storage["topics"]["by_slug"].keys())) + " topics by slug"
)
for item in content_data:
slug = get_shout_slug(item)
diff --git a/migration/bson2json.py b/migration/bson2json.py
index 03effe19..c34cf505 100644
--- a/migration/bson2json.py
+++ b/migration/bson2json.py
@@ -1,8 +1,9 @@
+import gc
import json
import os
import bson
-import gc
+
from .utils import DateTimeEncoder
@@ -15,10 +16,10 @@ def json_tables():
"email_subscriptions": [],
"users": [],
"comments": [],
- "remarks": []
+ "remarks": [],
}
for table in data.keys():
- print('[migration] bson2json for ' + table)
+ print("[migration] bson2json for " + table)
gc.collect()
lc = []
bs = open("dump/discours/" + table + ".bson", "rb").read()
diff --git a/migration/export.py b/migration/export.py
index 102cfb14..42004ee3 100644
--- a/migration/export.py
+++ b/migration/export.py
@@ -71,47 +71,29 @@ def export_slug(slug, storage):
def export_email_subscriptions():
- email_subscriptions_data = json.loads(
- open("migration/data/email_subscriptions.json").read()
- )
+ email_subscriptions_data = json.loads(open("migration/data/email_subscriptions.json").read())
for data in email_subscriptions_data:
# TODO: migrate to mailgun list manually
# migrate_email_subscription(data)
pass
- print(
- "[migration] "
- + str(len(email_subscriptions_data))
- + " email subscriptions exported"
- )
+ print("[migration] " + str(len(email_subscriptions_data)) + " email subscriptions exported")
def export_shouts(storage):
# update what was just migrated or load json again
if len(storage["users"]["by_slugs"].keys()) == 0:
- storage["users"]["by_slugs"] = json.loads(
- open(EXPORT_DEST + "authors.json").read()
- )
- print(
- "[migration] "
- + str(len(storage["users"]["by_slugs"].keys()))
- + " exported authors "
- )
+ storage["users"]["by_slugs"] = json.loads(open(EXPORT_DEST + "authors.json").read())
+ print("[migration] " + str(len(storage["users"]["by_slugs"].keys())) + " exported authors ")
if len(storage["shouts"]["by_slugs"].keys()) == 0:
- storage["shouts"]["by_slugs"] = json.loads(
- open(EXPORT_DEST + "articles.json").read()
- )
+ storage["shouts"]["by_slugs"] = json.loads(open(EXPORT_DEST + "articles.json").read())
print(
- "[migration] "
- + str(len(storage["shouts"]["by_slugs"].keys()))
- + " exported articles "
+ "[migration] " + str(len(storage["shouts"]["by_slugs"].keys())) + " exported articles "
)
for slug in storage["shouts"]["by_slugs"].keys():
export_slug(slug, storage)
-def export_json(
- export_articles={}, export_authors={}, export_topics={}, export_comments={}
-):
+def export_json(export_articles={}, export_authors={}, export_topics={}, export_comments={}):
open(EXPORT_DEST + "authors.json", "w").write(
json.dumps(
export_authors,
@@ -152,8 +134,4 @@ def export_json(
ensure_ascii=False,
)
)
- print(
- "[migration] "
- + str(len(export_comments.items()))
- + " exported articles with comments"
- )
+ print("[migration] " + str(len(export_comments.items())) + " exported articles with comments")
diff --git a/migration/extract.py b/migration/extract.py
index fcd293e6..466bf036 100644
--- a/migration/extract.py
+++ b/migration/extract.py
@@ -1,11 +1,8 @@
-import base64
import os
import re
-import uuid
from bs4 import BeautifulSoup
-
TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
contentDir = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "..", "..", "discoursio-web", "content"
@@ -27,76 +24,79 @@ def replace_tooltips(body):
return newbody
-
-def extract_footnotes(body, shout_dict):
- parts = body.split("&&&")
- lll = len(parts)
- newparts = list(parts)
- placed = False
- if lll & 1:
- if lll > 1:
- i = 1
- print("[extract] found %d footnotes in body" % (lll - 1))
- for part in parts[1:]:
- if i & 1:
- placed = True
- if 'a class="footnote-url" href=' in part:
- print("[extract] footnote: " + part)
- fn = 'a class="footnote-url" href="'
- exxtracted_link = part.split(fn, 1)[1].split('"', 1)[0]
- extracted_body = part.split(fn, 1)[1].split('>', 1)[1].split('', 1)[0]
- print("[extract] footnote link: " + extracted_link)
- with local_session() as session:
- Reaction.create({
- "shout": shout_dict['id'],
- "kind": ReactionKind.FOOTNOTE,
- "body": extracted_body,
- "range": str(body.index(fn + link) - len('<')) + ':' + str(body.index(extracted_body) + len(''))
- })
- newparts[i] = "ℹ️"
- else:
- newparts[i] = part
- i += 1
- return ("".join(newparts), placed)
+# def extract_footnotes(body, shout_dict):
+# parts = body.split("&&&")
+# lll = len(parts)
+# newparts = list(parts)
+# placed = False
+# if lll & 1:
+# if lll > 1:
+# i = 1
+# print("[extract] found %d footnotes in body" % (lll - 1))
+# for part in parts[1:]:
+# if i & 1:
+# placed = True
+# if 'a class="footnote-url" href=' in part:
+# print("[extract] footnote: " + part)
+# fn = 'a class="footnote-url" href="'
+# exxtracted_link = part.split(fn, 1)[1].split('"', 1)[0]
+# extracted_body = part.split(fn, 1)[1].split(">", 1)[1].split("", 1)[0]
+# print("[extract] footnote link: " + extracted_link)
+# with local_session() as session:
+# Reaction.create(
+# {
+# "shout": shout_dict["id"],
+# "kind": ReactionKind.FOOTNOTE,
+# "body": extracted_body,
+# "range": str(body.index(fn + link) - len("<"))
+# + ":"
+# + str(body.index(extracted_body) + len("")),
+# }
+# )
+# newparts[i] = "ℹ️"
+# else:
+# newparts[i] = part
+# i += 1
+# return ("".join(newparts), placed)
-def place_tooltips(body):
- parts = body.split("&&&")
- lll = len(parts)
- newparts = list(parts)
- placed = False
- if lll & 1:
- if lll > 1:
- i = 1
- print("[extract] found %d tooltips" % (lll - 1))
- for part in parts[1:]:
- if i & 1:
- placed = True
- if 'a class="footnote-url" href=' in part:
- print("[extract] footnote: " + part)
- fn = 'a class="footnote-url" href="'
- link = part.split(fn, 1)[1].split('"', 1)[0]
- extracted_part = (
- part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
- )
- newparts[i] = (
- ""
- + extracted_part
- + " "
- )
- else:
- newparts[i] = "%s " % part
- # print('[extract] ' + newparts[i])
- else:
- # print('[extract] ' + part[:10] + '..')
- newparts[i] = part
- i += 1
- return ("".join(newparts), placed)
+# def place_tooltips(body):
+# parts = body.split("&&&")
+# lll = len(parts)
+# newparts = list(parts)
+# placed = False
+# if lll & 1:
+# if lll > 1:
+# i = 1
+# print("[extract] found %d tooltips" % (lll - 1))
+# for part in parts[1:]:
+# if i & 1:
+# placed = True
+# if 'a class="footnote-url" href=' in part:
+# print("[extract] footnote: " + part)
+# fn = 'a class="footnote-url" href="'
+# link = part.split(fn, 1)[1].split('"', 1)[0]
+# extracted_part = part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
+# newparts[i] = (
+# ""
+# + extracted_part
+# + " "
+# )
+# else:
+# newparts[i] = "%s " % part
+# # print('[extract] ' + newparts[i])
+# else:
+# # print('[extract] ' + part[:10] + '..')
+# newparts[i] = part
+# i += 1
+# return ("".join(newparts), placed)
-IMG_REGEX = r"\!\[(.*?)\]\((data\:image\/(png|jpeg|jpg);base64\,((?:[A-Za-z\d+\/]{4})*(?:[A-Za-z\d+\/]{3}="
+IMG_REGEX = (
+ r"\!\[(.*?)\]\((data\:image\/(png|jpeg|jpg);base64\,((?:[A-Za-z\d+\/]{4})*(?:[A-Za-z\d+\/]{3}="
+)
IMG_REGEX += r"|[A-Za-z\d+\/]{2}==)))\)"
parentDir = "/".join(os.getcwd().split("/")[:-1])
@@ -104,29 +104,29 @@ public = parentDir + "/discoursio-web/public"
cache = {}
-def reextract_images(body, oid):
- # change if you prefer regexp
- matches = list(re.finditer(IMG_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
- i = 0
- for match in matches:
- print("[extract] image " + match.group(1))
- ext = match.group(3)
- name = oid + str(i)
- link = public + "/upload/image-" + name + "." + ext
- img = match.group(4)
- title = match.group(1) # NOTE: this is not the title
- if img not in cache:
- content = base64.b64decode(img + "==")
- print(str(len(img)) + " image bytes been written")
- open("../" + link, "wb").write(content)
- cache[img] = name
- i += 1
- else:
- print("[extract] image cached " + cache[img])
- body.replace(
- str(match), ""
- ) # WARNING: this does not work
- return body
+# def reextract_images(body, oid):
+# # change if you prefer regexp
+# matches = list(re.finditer(IMG_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
+# i = 0
+# for match in matches:
+# print("[extract] image " + match.group(1))
+# ext = match.group(3)
+# name = oid + str(i)
+# link = public + "/upload/image-" + name + "." + ext
+# img = match.group(4)
+# title = match.group(1) # NOTE: this is not the title
+# if img not in cache:
+# content = base64.b64decode(img + "==")
+# print(str(len(img)) + " image bytes been written")
+# open("../" + link, "wb").write(content)
+# cache[img] = name
+# i += 1
+# else:
+# print("[extract] image cached " + cache[img])
+# body.replace(
+# str(match), ""
+# ) # WARNING: this does not work
+# return body
IMAGES = {
@@ -137,163 +137,11 @@ IMAGES = {
b64 = ";base64,"
-
-def extract_imageparts(bodyparts, prefix):
- # recursive loop
- newparts = list(bodyparts)
- for current in bodyparts:
- i = bodyparts.index(current)
- for mime in IMAGES.keys():
- if mime == current[-len(mime) :] and (i + 1 < len(bodyparts)):
- print("[extract] " + mime)
- next = bodyparts[i + 1]
- ext = IMAGES[mime]
- b64end = next.index(")")
- b64encoded = next[:b64end]
- name = prefix + "-" + str(len(cache))
- link = "/upload/image-" + name + "." + ext
- print("[extract] name: " + name)
- print("[extract] link: " + link)
- print("[extract] %d bytes" % len(b64encoded))
- if b64encoded not in cache:
- try:
- content = base64.b64decode(b64encoded + "==")
- open(public + link, "wb").write(content)
- print(
- "[extract] "
- + str(len(content))
- + " image bytes been written"
- )
- cache[b64encoded] = name
- except Exception:
- raise Exception
- # raise Exception('[extract] error decoding image %r' %b64encoded)
- else:
- print("[extract] cached link " + cache[b64encoded])
- name = cache[b64encoded]
- link = cdn + "/upload/image-" + name + "." + ext
- newparts[i] = (
- current[: -len(mime)]
- + current[-len(mime) :]
- + link
- + next[-b64end:]
- )
- newparts[i + 1] = next[:-b64end]
- break
- return (
- extract_imageparts(
- newparts[i] + newparts[i + 1] + b64.join(bodyparts[(i + 2) :]), prefix
- )
- if len(bodyparts) > (i + 1)
- else "".join(newparts)
- )
-
-
-def extract_dataimages(parts, prefix):
- newparts = list(parts)
- for part in parts:
- i = parts.index(part)
- if part.endswith("]("):
- [ext, rest] = parts[i + 1].split(b64)
- name = prefix + "-" + str(len(cache))
- if ext == "/jpeg":
- ext = "jpg"
- else:
- ext = ext.replace("/", "")
- link = "/upload/image-" + name + "." + ext
- print("[extract] filename: " + link)
- b64end = rest.find(")")
- if b64end != -1:
- b64encoded = rest[:b64end]
- print("[extract] %d text bytes" % len(b64encoded))
- # write if not cached
- if b64encoded not in cache:
- try:
- content = base64.b64decode(b64encoded + "==")
- open(public + link, "wb").write(content)
- print("[extract] " + str(len(content)) + " image bytes")
- cache[b64encoded] = name
- except Exception:
- raise Exception
- # raise Exception('[extract] error decoding image %r' %b64encoded)
- else:
- print("[extract] 0 image bytes, cached for " + cache[b64encoded])
- name = cache[b64encoded]
-
- # update link with CDN
- link = cdn + "/upload/image-" + name + "." + ext
-
- # patch newparts
- newparts[i + 1] = link + rest[b64end:]
- else:
- raise Exception("cannot find the end of base64 encoded string")
- else:
- print("[extract] dataimage skipping part " + str(i))
- continue
- return "".join(newparts)
-
-
di = "data:image"
-def extract_md_images(body, prefix):
- newbody = ""
- body = (
- body.replace("\n! [](" + di, "\n 
- .replace("\n[](" + di, "\n
- .replace(" [](" + di, " 
- )
- parts = body.split(di)
- if len(parts) > 1:
- newbody = extract_dataimages(parts, prefix)
- else:
- newbody = body
- return newbody
-
-
-def cleanup_md(body):
- newbody = (
- body.replace("<", "")
- .replace(">", "")
- .replace("{", "(")
- .replace("}", ")")
- .replace("…", "...")
- .replace(" __ ", " ")
- .replace("_ _", " ")
- .replace("****", "")
- .replace("\u00a0", " ")
- .replace("\u02c6", "^")
- .replace("\u00a0", " ")
- .replace("\ufeff", "")
- .replace("\u200b", "")
- .replace("\u200c", "")
- ) # .replace('\u2212', '-')
- return newbody
-
-
-def extract_md(body, shout_dict = None):
- newbody = body
- if newbody:
- newbody = cleanup_md(newbody)
- if not newbody:
- raise Exception("cleanup error")
-
- if shout_dict:
-
- uid = shout_dict['id'] or uuid.uuid4()
- newbody = extract_md_images(newbody, uid)
- if not newbody:
- raise Exception("extract_images error")
-
- newbody, placed = extract_footnotes(body, shout_dict)
- if not newbody:
- raise Exception("extract_footnotes error")
-
- return newbody
-
-
def extract_media(entry):
- ''' normalized media extraction method '''
+ """normalized media extraction method"""
# media [ { title pic url body } ]}
kind = entry.get("type")
if not kind:
@@ -323,12 +171,7 @@ def extract_media(entry):
url = "https://vimeo.com/" + m["vimeoId"]
# body
body = m.get("body") or m.get("literatureBody") or ""
- media.append({
- "url": url,
- "pic": pic,
- "title": title,
- "body": body
- })
+ media.append({"url": url, "pic": pic, "title": title, "body": body})
return media
@@ -398,9 +241,7 @@ def cleanup_html(body: str) -> str:
r"\s*
",
r"\s*",
]
- regex_replace = {
- r"
\s*": ""
- }
+ regex_replace = {r"
\s*": ""}
changed = True
while changed:
# we need several iterations to clean nested tags this way
@@ -414,16 +255,17 @@ def cleanup_html(body: str) -> str:
changed = True
return new_body
-def extract_html(entry, shout_id = None, cleanup=False):
- body_orig = (entry.get("body") or "").replace('\(', '(').replace('\)', ')')
+
+def extract_html(entry, shout_id=None, cleanup=False):
+ body_orig = (entry.get("body") or "").replace(r"\(", "(").replace(r"\)", ")")
if cleanup:
# we do that before bs parsing to catch the invalid html
body_clean = cleanup_html(body_orig)
if body_clean != body_orig:
print(f"[migration] html cleaned for slug {entry.get('slug', None)}")
body_orig = body_clean
- if shout_id:
- extract_footnotes(body_orig, shout_id)
+ # if shout_id:
+ # extract_footnotes(body_orig, shout_id)
body_html = str(BeautifulSoup(body_orig, features="html.parser"))
if cleanup:
# we do that after bs parsing because it can add dummy tags
diff --git a/migration/html2text/__init__.py b/migration/html2text/__init__.py
index 1090025c..b97e6070 100644
--- a/migration/html2text/__init__.py
+++ b/migration/html2text/__init__.py
@@ -33,7 +33,7 @@ __version__ = (2020, 1, 16)
# TODO: Support decoded entities with UNIFIABLE.
-class HTML2Text(html.parser.HTMLParser):
+class HTML2Text(html.parser.HTMLParser): # noqa: C901
def __init__(
self,
out: Optional[OutCallback] = None,
@@ -85,7 +85,7 @@ class HTML2Text(html.parser.HTMLParser):
self.tag_callback = None
self.open_quote = config.OPEN_QUOTE # covered in cli
self.close_quote = config.CLOSE_QUOTE # covered in cli
- self.header_id = None
+ self.header_id: str | None = None
self.span_highlight = False
self.span_lead = False
@@ -119,9 +119,7 @@ class HTML2Text(html.parser.HTMLParser):
self.lastWasList = False
self.style = 0
self.style_def = {} # type: Dict[str, Dict[str, str]]
- self.tag_stack = (
- []
- ) # type: List[Tuple[str, Dict[str, Optional[str]], Dict[str, str]]]
+ self.tag_stack = [] # type: List[Tuple[str, Dict[str, Optional[str]], Dict[str, str]]]
self.emphasis = 0
self.drop_white_space = 0
self.inheader = False
@@ -227,7 +225,7 @@ class HTML2Text(html.parser.HTMLParser):
return i
return None
- def handle_emphasis(
+ def handle_emphasis( # noqa: C901
self, start: bool, tag_style: Dict[str, str], parent_style: Dict[str, str]
) -> None:
"""
@@ -300,7 +298,7 @@ class HTML2Text(html.parser.HTMLParser):
if strikethrough:
self.quiet -= 1
- def handle_tag(
+ def handle_tag( # noqa: C901
self, tag: str, attrs: Dict[str, Optional[str]], start: bool
) -> None:
self.current_tag = tag
@@ -333,9 +331,7 @@ class HTML2Text(html.parser.HTMLParser):
tag_style = element_style(attrs, self.style_def, parent_style)
self.tag_stack.append((tag, attrs, tag_style))
else:
- dummy, attrs, tag_style = (
- self.tag_stack.pop() if self.tag_stack else (None, {}, {})
- )
+ dummy, attrs, tag_style = self.tag_stack.pop() if self.tag_stack else (None, {}, {})
if self.tag_stack:
parent_style = self.tag_stack[-1][2]
@@ -385,11 +381,7 @@ class HTML2Text(html.parser.HTMLParser):
):
self.o("`") # NOTE: same as
self.span_highlight = True
- elif (
- self.current_class == "lead"
- and not self.inheader
- and not self.span_highlight
- ):
+ elif self.current_class == "lead" and not self.inheader and not self.span_highlight:
# self.o("==") # NOTE: CriticMarkup {==
self.span_lead = True
else:
@@ -479,11 +471,7 @@ class HTML2Text(html.parser.HTMLParser):
and not self.span_lead
and not self.span_highlight
):
- if (
- start
- and self.preceding_data
- and self.preceding_data[-1] == self.strong_mark[0]
- ):
+ if start and self.preceding_data and self.preceding_data[-1] == self.strong_mark[0]:
strong = " " + self.strong_mark
self.preceding_data += " "
else:
@@ -548,13 +536,8 @@ class HTML2Text(html.parser.HTMLParser):
"href" in attrs
and not attrs["href"].startswith("#_ftn")
and attrs["href"] is not None
- and not (
- self.skip_internal_links and attrs["href"].startswith("#")
- )
- and not (
- self.ignore_mailto_links
- and attrs["href"].startswith("mailto:")
- )
+ and not (self.skip_internal_links and attrs["href"].startswith("#"))
+ and not (self.ignore_mailto_links and attrs["href"].startswith("mailto:"))
):
self.astack.append(attrs)
self.maybe_automatic_link = attrs["href"]
@@ -591,7 +574,7 @@ class HTML2Text(html.parser.HTMLParser):
if tag == "img" and start and not self.ignore_images:
# skip cloudinary images
- if "src" in attrs and "cloudinary" not in attrs["src"]:
+ if "src" in attrs and ("cloudinary" not in attrs["src"]):
assert attrs["src"] is not None
if not self.images_to_alt:
attrs["href"] = attrs["src"]
@@ -638,9 +621,7 @@ class HTML2Text(html.parser.HTMLParser):
self.o("![" + escape_md(alt) + "]")
if self.inline_links:
href = attrs.get("href") or ""
- self.o(
- "(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")"
- )
+ self.o("(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")")
else:
i = self.previousIndex(attrs)
if i is not None:
@@ -696,9 +677,7 @@ class HTML2Text(html.parser.HTMLParser):
# WARNING: does not line up - s > 9 correctly.
parent_list = None
for list in self.list:
- self.o(
- " " if parent_list == "ol" and list.name == "ul" else " "
- )
+ self.o(" " if parent_list == "ol" and list.name == "ul" else " ")
parent_list = list.name
if li.name == "ul":
@@ -787,7 +766,7 @@ class HTML2Text(html.parser.HTMLParser):
self.pbr()
self.br_toggle = " "
- def o(
+ def o( # noqa: C901
self, data: str, puredata: bool = False, force: Union[bool, str] = False
) -> None:
"""
@@ -864,9 +843,7 @@ class HTML2Text(html.parser.HTMLParser):
self.out(" ")
self.space = False
- if self.a and (
- (self.p_p == 2 and self.links_each_paragraph) or force == "end"
- ):
+ if self.a and ((self.p_p == 2 and self.links_each_paragraph) or force == "end"):
if force == "end":
self.out("\n")
@@ -925,11 +902,7 @@ class HTML2Text(html.parser.HTMLParser):
if self.maybe_automatic_link is not None:
href = self.maybe_automatic_link
- if (
- href == data
- and self.absolute_url_matcher.match(href)
- and self.use_automatic_links
- ):
+ if href == data and self.absolute_url_matcher.match(href) and self.use_automatic_links:
self.o("<" + data + ">")
self.empty_link = False
return
@@ -980,7 +953,7 @@ class HTML2Text(html.parser.HTMLParser):
return nest_count
- def optwrap(self, text: str) -> str:
+ def optwrap(self, text: str) -> str: # noqa: C901
"""
Wrap all paragraphs in the provided text.
@@ -1000,9 +973,7 @@ class HTML2Text(html.parser.HTMLParser):
self.inline_links = False
for para in text.split("\n"):
if len(para) > 0:
- if not skipwrap(
- para, self.wrap_links, self.wrap_list_items, self.wrap_tables
- ):
+ if not skipwrap(para, self.wrap_links, self.wrap_list_items, self.wrap_tables):
indent = ""
if para.startswith(" " + self.ul_item_mark):
# list item continuation: add a double indent to the
@@ -1043,12 +1014,10 @@ class HTML2Text(html.parser.HTMLParser):
return result
-def html2text(
- html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH
-) -> str:
+def html2text(html: str, baseurl: str = "", bodywidth: int = config.BODY_WIDTH) -> str:
h = html.strip() or ""
if h:
- h = HTML2Text(baseurl=baseurl, bodywidth=bodywidth)
- h = h.handle(html.strip())
+ h2t = HTML2Text(baseurl=baseurl, bodywidth=bodywidth)
+ h = h2t.handle(html.strip())
# print('[html2text] %d bytes' % len(html))
return h
diff --git a/migration/html2text/cli.py b/migration/html2text/cli.py
index dbaba28b..62e0738f 100644
--- a/migration/html2text/cli.py
+++ b/migration/html2text/cli.py
@@ -117,10 +117,7 @@ def main() -> None:
dest="images_with_size",
action="store_true",
default=config.IMAGES_WITH_SIZE,
- help=(
- "Write image tags with height and width attrs as raw html to retain "
- "dimensions"
- ),
+ help=("Write image tags with height and width attrs as raw html to retain " "dimensions"),
)
p.add_argument(
"-g",
@@ -260,9 +257,7 @@ def main() -> None:
default=config.CLOSE_QUOTE,
help="The character used to close quotes",
)
- p.add_argument(
- "--version", action="version", version=".".join(map(str, __version__))
- )
+ p.add_argument("--version", action="version", version=".".join(map(str, __version__)))
p.add_argument("filename", nargs="?")
p.add_argument("encoding", nargs="?", default="utf-8")
args = p.parse_args()
diff --git a/migration/html2text/utils.py b/migration/html2text/utils.py
index 1cf22b52..568e1fc5 100644
--- a/migration/html2text/utils.py
+++ b/migration/html2text/utils.py
@@ -4,9 +4,7 @@ from typing import Dict, List, Optional
from . import config
unifiable_n = {
- html.entities.name2codepoint[k]: v
- for k, v in config.UNIFIABLE.items()
- if k != "nbsp"
+ html.entities.name2codepoint[k]: v for k, v in config.UNIFIABLE.items() if k != "nbsp"
}
@@ -68,12 +66,14 @@ def element_style(
:rtype: dict
"""
style = parent_style.copy()
- if attrs.get("class"):
- for css_class in attrs["class"].split():
+ attrs_class = attrs.get("class")
+ if attrs_class:
+ for css_class in attrs_class.split():
css_style = style_def.get("." + css_class, {})
style.update(css_style)
- if attrs.get("style"):
- immediate_style = dumb_property_dict(attrs["style"])
+ attrs_style = attrs.get("style")
+ if attrs_style:
+ immediate_style = dumb_property_dict(attrs_style)
style.update(immediate_style)
return style
@@ -147,18 +147,17 @@ def list_numbering_start(attrs: Dict[str, Optional[str]]) -> int:
:rtype: int or None
"""
- if attrs.get("start"):
+ attrs_start = attrs.get("start")
+ if attrs_start:
try:
- return int(attrs["start"]) - 1
+ return int(attrs_start) - 1
except ValueError:
pass
return 0
-def skipwrap(
- para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool
-) -> bool:
+def skipwrap(para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool) -> bool:
# If it appears to contain a link
# don't wrap
if not wrap_links and config.RE_LINK.search(para):
@@ -236,9 +235,7 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]:
max_width += [len(x) + right_margin for x in cols[-(num_cols - max_cols) :]]
max_cols = num_cols
- max_width = [
- max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)
- ]
+ max_width = [max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)]
# reformat
new_lines = []
@@ -247,15 +244,13 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]:
if set(line.strip()) == set("-|"):
filler = "-"
new_cols = [
- x.rstrip() + (filler * (M - len(x.rstrip())))
- for x, M in zip(cols, max_width)
+ x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
]
new_lines.append("|-" + "|".join(new_cols) + "|")
else:
filler = " "
new_cols = [
- x.rstrip() + (filler * (M - len(x.rstrip())))
- for x, M in zip(cols, max_width)
+ x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
]
new_lines.append("| " + "|".join(new_cols) + "|")
return new_lines
diff --git a/migration/tables/__init__.py b/migration/tables/__init__.py
deleted file mode 100644
index 8e7ee938..00000000
--- a/migration/tables/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__all__ = (["users", "topics", "content_items", "comments"],)
diff --git a/migration/tables/comments.py b/migration/tables/comments.py
index 82e32924..16c91228 100644
--- a/migration/tables/comments.py
+++ b/migration/tables/comments.py
@@ -5,61 +5,48 @@ from dateutil.parser import parse as date_parse
from base.orm import local_session
from migration.html2text import html2text
from orm.reaction import Reaction, ReactionKind
-from orm.shout import ShoutReactionsFollower
+from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import TopicFollower
from orm.user import User
-from orm.shout import Shout
ts = datetime.now(tz=timezone.utc)
def auto_followers(session, topics, reaction_dict):
# creating shout's reactions following for reaction author
- following1 = session.query(
- ShoutReactionsFollower
- ).where(
- ShoutReactionsFollower.follower == reaction_dict["createdBy"]
- ).filter(
- ShoutReactionsFollower.shout == reaction_dict["shout"]
- ).first()
+ following1 = (
+ session.query(ShoutReactionsFollower)
+ .where(ShoutReactionsFollower.follower == reaction_dict["createdBy"])
+ .filter(ShoutReactionsFollower.shout == reaction_dict["shout"])
+ .first()
+ )
if not following1:
following1 = ShoutReactionsFollower.create(
- follower=reaction_dict["createdBy"],
- shout=reaction_dict["shout"],
- auto=True
+ follower=reaction_dict["createdBy"], shout=reaction_dict["shout"], auto=True
)
session.add(following1)
# creating topics followings for reaction author
for t in topics:
- tf = session.query(
- TopicFollower
- ).where(
- TopicFollower.follower == reaction_dict["createdBy"]
- ).filter(
- TopicFollower.topic == t['id']
- ).first()
+ tf = (
+ session.query(TopicFollower)
+ .where(TopicFollower.follower == reaction_dict["createdBy"])
+ .filter(TopicFollower.topic == t["id"])
+ .first()
+ )
if not tf:
topic_following = TopicFollower.create(
- follower=reaction_dict["createdBy"],
- topic=t['id'],
- auto=True
+ follower=reaction_dict["createdBy"], topic=t["id"], auto=True
)
session.add(topic_following)
def migrate_ratings(session, entry, reaction_dict):
for comment_rating_old in entry.get("ratings", []):
- rater = (
- session.query(User)
- .filter(User.oid == comment_rating_old["createdBy"])
- .first()
- )
+ rater = session.query(User).filter(User.oid == comment_rating_old["createdBy"]).first()
re_reaction_dict = {
"shout": reaction_dict["shout"],
"replyTo": reaction_dict["id"],
- "kind": ReactionKind.LIKE
- if comment_rating_old["value"] > 0
- else ReactionKind.DISLIKE,
+ "kind": ReactionKind.LIKE if comment_rating_old["value"] > 0 else ReactionKind.DISLIKE,
"createdBy": rater.id if rater else 1,
}
cts = comment_rating_old.get("createdAt")
@@ -68,18 +55,15 @@ def migrate_ratings(session, entry, reaction_dict):
try:
# creating reaction from old rating
rr = Reaction.create(**re_reaction_dict)
- following2 = session.query(
- ShoutReactionsFollower
- ).where(
- ShoutReactionsFollower.follower == re_reaction_dict['createdBy']
- ).filter(
- ShoutReactionsFollower.shout == rr.shout
- ).first()
+ following2 = (
+ session.query(ShoutReactionsFollower)
+ .where(ShoutReactionsFollower.follower == re_reaction_dict["createdBy"])
+ .filter(ShoutReactionsFollower.shout == rr.shout)
+ .first()
+ )
if not following2:
following2 = ShoutReactionsFollower.create(
- follower=re_reaction_dict['createdBy'],
- shout=rr.shout,
- auto=True
+ follower=re_reaction_dict["createdBy"], shout=rr.shout, auto=True
)
session.add(following2)
session.add(rr)
@@ -150,9 +134,7 @@ async def migrate(entry, storage):
else:
stage = "author and old id found"
try:
- shout = session.query(
- Shout
- ).where(Shout.slug == old_shout["slug"]).one()
+ shout = session.query(Shout).where(Shout.slug == old_shout["slug"]).one()
if shout:
reaction_dict["shout"] = shout.id
reaction_dict["createdBy"] = author.id if author else 1
@@ -178,9 +160,9 @@ async def migrate(entry, storage):
def migrate_2stage(old_comment, idmap):
- if old_comment.get('body'):
- new_id = idmap.get(old_comment.get('oid'))
- new_id = idmap.get(old_comment.get('_id'))
+ if old_comment.get("body"):
+ new_id = idmap.get(old_comment.get("oid"))
+ new_id = idmap.get(old_comment.get("_id"))
if new_id:
new_replyto_id = None
old_replyto_id = old_comment.get("replyTo")
@@ -190,17 +172,20 @@ def migrate_2stage(old_comment, idmap):
comment = session.query(Reaction).where(Reaction.id == new_id).first()
try:
if new_replyto_id:
- new_reply = session.query(Reaction).where(Reaction.id == new_replyto_id).first()
+ new_reply = (
+ session.query(Reaction).where(Reaction.id == new_replyto_id).first()
+ )
if not new_reply:
print(new_replyto_id)
raise Exception("cannot find reply by id!")
comment.replyTo = new_reply.id
session.add(comment)
- srf = session.query(ShoutReactionsFollower).where(
- ShoutReactionsFollower.shout == comment.shout
- ).filter(
- ShoutReactionsFollower.follower == comment.createdBy
- ).first()
+ srf = (
+ session.query(ShoutReactionsFollower)
+ .where(ShoutReactionsFollower.shout == comment.shout)
+ .filter(ShoutReactionsFollower.follower == comment.createdBy)
+ .first()
+ )
if not srf:
srf = ShoutReactionsFollower.create(
shout=comment.shout, follower=comment.createdBy, auto=True
diff --git a/migration/tables/content_items.py b/migration/tables/content_items.py
index a2297d98..5486b464 100644
--- a/migration/tables/content_items.py
+++ b/migration/tables/content_items.py
@@ -1,16 +1,18 @@
-from datetime import datetime, timezone
import json
+import re
+from datetime import datetime, timezone
+
from dateutil.parser import parse as date_parse
from sqlalchemy.exc import IntegrityError
from transliterate import translit
+
from base.orm import local_session
from migration.extract import extract_html, extract_media
from orm.reaction import Reaction, ReactionKind
-from orm.shout import Shout, ShoutTopic, ShoutReactionsFollower
+from orm.shout import Shout, ShoutReactionsFollower, ShoutTopic
+from orm.topic import Topic, TopicFollower
from orm.user import User
-from orm.topic import TopicFollower, Topic
from services.stat.viewed import ViewedStorage
-import re
OLD_DATE = "2016-03-05 22:22:00.350000"
ts = datetime.now(tz=timezone.utc)
@@ -33,7 +35,7 @@ def get_shout_slug(entry):
slug = friend.get("slug", "")
if slug:
break
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
return slug
@@ -41,27 +43,27 @@ def create_author_from_app(app):
user = None
userdata = None
# check if email is used
- if app['email']:
+ if app["email"]:
with local_session() as session:
- user = session.query(User).where(User.email == app['email']).first()
+ user = session.query(User).where(User.email == app["email"]).first()
if not user:
# print('[migration] app %r' % app)
- name = app.get('name')
+ name = app.get("name")
if name:
slug = translit(name, "ru", reversed=True).lower()
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
- print('[migration] created slug %s' % slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ print("[migration] created slug %s" % slug)
# check if slug is used
if slug:
user = session.query(User).where(User.slug == slug).first()
# get slug from email
if user:
- slug = app['email'].split('@')[0]
+ slug = app["email"].split("@")[0]
user = session.query(User).where(User.slug == slug).first()
# one more try
if user:
- slug += '-author'
+ slug += "-author"
user = session.query(User).where(User.slug == slug).first()
# create user with application data
@@ -79,7 +81,7 @@ def create_author_from_app(app):
user = User.create(**userdata)
session.add(user)
session.commit()
- userdata['id'] = user.id
+ userdata["id"] = user.id
userdata = user.dict()
return userdata
@@ -91,11 +93,12 @@ async def create_shout(shout_dict):
s = Shout.create(**shout_dict)
author = s.authors[0]
with local_session() as session:
- srf = session.query(ShoutReactionsFollower).where(
- ShoutReactionsFollower.shout == s.id
- ).filter(
- ShoutReactionsFollower.follower == author.id
- ).first()
+ srf = (
+ session.query(ShoutReactionsFollower)
+ .where(ShoutReactionsFollower.shout == s.id)
+ .filter(ShoutReactionsFollower.follower == author.id)
+ .first()
+ )
if not srf:
srf = ShoutReactionsFollower.create(shout=s.id, follower=author.id, auto=True)
session.add(srf)
@@ -116,14 +119,14 @@ async def get_user(entry, storage):
elif user_oid:
userdata = storage["users"]["by_oid"].get(user_oid)
if not userdata:
- print('no userdata by oid, anonymous')
+ print("no userdata by oid, anonymous")
userdata = anondict
print(app)
# cleanup slug
if userdata:
slug = userdata.get("slug", "")
if slug:
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
userdata["slug"] = slug
else:
userdata = anondict
@@ -137,11 +140,14 @@ async def migrate(entry, storage):
r = {
"layout": type2layout[entry["type"]],
"title": entry["title"],
- "authors": [author, ],
+ "authors": [
+ author,
+ ],
"slug": get_shout_slug(entry),
"cover": (
- "https://images.discours.io/unsafe/" +
- entry["thumborId"] if entry.get("thumborId") else entry.get("image", {}).get("url")
+ "https://images.discours.io/unsafe/" + entry["thumborId"]
+ if entry.get("thumborId")
+ else entry.get("image", {}).get("url")
),
"visibility": "public" if entry.get("published") else "community",
"publishedAt": date_parse(entry.get("publishedAt")) if entry.get("published") else None,
@@ -150,11 +156,11 @@ async def migrate(entry, storage):
"updatedAt": date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts,
"createdBy": author.id,
"topics": await add_topics_follower(entry, storage, author),
- "body": extract_html(entry, cleanup=True)
+ "body": extract_html(entry, cleanup=True),
}
# main topic patch
- r['mainTopic'] = r['topics'][0]
+ r["mainTopic"] = r["topics"][0]
# published author auto-confirm
if entry.get("published"):
@@ -177,14 +183,16 @@ async def migrate(entry, storage):
shout_dict["oid"] = entry.get("_id", "")
shout = await create_shout(shout_dict)
except IntegrityError as e:
- print('[migration] create_shout integrity error', e)
+ print("[migration] create_shout integrity error", e)
shout = await resolve_create_shout(shout_dict)
except Exception as e:
raise Exception(e)
# udpate data
shout_dict = shout.dict()
- shout_dict["authors"] = [author.dict(), ]
+ shout_dict["authors"] = [
+ author.dict(),
+ ]
# shout topics aftermath
shout_dict["topics"] = await topics_aftermath(r, storage)
@@ -193,7 +201,9 @@ async def migrate(entry, storage):
await content_ratings_to_reactions(entry, shout_dict["slug"])
# shout views
- await ViewedStorage.increment(shout_dict["slug"], amount=entry.get("views", 1), viewer='old-discours')
+ await ViewedStorage.increment(
+ shout_dict["slug"], amount=entry.get("views", 1), viewer="old-discours"
+ )
# del shout_dict['ratings']
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
@@ -205,7 +215,9 @@ async def add_topics_follower(entry, storage, user):
topics = set([])
category = entry.get("category")
topics_by_oid = storage["topics"]["by_oid"]
- oids = [category, ] + entry.get("tags", [])
+ oids = [
+ category,
+ ] + entry.get("tags", [])
for toid in oids:
tslug = topics_by_oid.get(toid, {}).get("slug")
if tslug:
@@ -217,23 +229,18 @@ async def add_topics_follower(entry, storage, user):
try:
tpc = session.query(Topic).where(Topic.slug == tpcslug).first()
if tpc:
- tf = session.query(
- TopicFollower
- ).where(
- TopicFollower.follower == user.id
- ).filter(
- TopicFollower.topic == tpc.id
- ).first()
+ tf = (
+ session.query(TopicFollower)
+ .where(TopicFollower.follower == user.id)
+ .filter(TopicFollower.topic == tpc.id)
+ .first()
+ )
if not tf:
- tf = TopicFollower.create(
- topic=tpc.id,
- follower=user.id,
- auto=True
- )
+ tf = TopicFollower.create(topic=tpc.id, follower=user.id, auto=True)
session.add(tf)
session.commit()
except IntegrityError:
- print('[migration.shout] hidden by topic ' + tpc.slug)
+ print("[migration.shout] hidden by topic " + tpc.slug)
# main topic
maintopic = storage["replacements"].get(topics_by_oid.get(category, {}).get("slug"))
if maintopic in ttt:
@@ -254,7 +261,7 @@ async def process_user(userdata, storage, oid):
if not user:
try:
slug = userdata["slug"].lower().strip()
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
userdata["slug"] = slug
user = User.create(**userdata)
session.add(user)
@@ -282,9 +289,9 @@ async def resolve_create_shout(shout_dict):
s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
bump = False
if s:
- if s.createdAt != shout_dict['createdAt']:
+ if s.createdAt != shout_dict["createdAt"]:
# create new with different slug
- shout_dict["slug"] += '-' + shout_dict["layout"]
+ shout_dict["slug"] += "-" + shout_dict["layout"]
try:
await create_shout(shout_dict)
except IntegrityError as e:
@@ -295,10 +302,7 @@ async def resolve_create_shout(shout_dict):
for key in shout_dict:
if key in s.__dict__:
if s.__dict__[key] != shout_dict[key]:
- print(
- "[migration] shout already exists, but differs in %s"
- % key
- )
+ print("[migration] shout already exists, but differs in %s" % key)
bump = True
else:
print("[migration] shout already exists, but lacks %s" % key)
@@ -344,9 +348,7 @@ async def topics_aftermath(entry, storage):
)
if not shout_topic_new:
try:
- ShoutTopic.create(
- **{"shout": shout.id, "topic": new_topic.id}
- )
+ ShoutTopic.create(**{"shout": shout.id, "topic": new_topic.id})
except Exception:
print("[migration] shout topic error: " + newslug)
session.commit()
@@ -363,9 +365,7 @@ async def content_ratings_to_reactions(entry, slug):
with local_session() as session:
for content_rating in entry.get("ratings", []):
rater = (
- session.query(User)
- .filter(User.oid == content_rating["createdBy"])
- .first()
+ session.query(User).filter(User.oid == content_rating["createdBy"]).first()
) or User.default_user
shout = session.query(Shout).where(Shout.slug == slug).first()
cts = content_rating.get("createdAt")
@@ -375,7 +375,7 @@ async def content_ratings_to_reactions(entry, slug):
if content_rating["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": rater.id,
- "shout": shout.id
+ "shout": shout.id,
}
reaction = (
session.query(Reaction)
diff --git a/migration/tables/remarks.py b/migration/tables/remarks.py
index 026b95c6..e09cf4fb 100644
--- a/migration/tables/remarks.py
+++ b/migration/tables/remarks.py
@@ -1,42 +1,35 @@
-from base.orm import local_session
-from migration.extract import extract_md
-from migration.html2text import html2text
-from orm.reaction import Reaction, ReactionKind
+# from base.orm import local_session
+
+# from migration.extract import extract_md
+# from migration.html2text import html2text
+# from orm.reaction import Reaction, ReactionKind
-def migrate(entry, storage):
- post_oid = entry['contentItem']
- print(post_oid)
- shout_dict = storage['shouts']['by_oid'].get(post_oid)
- if shout_dict:
- print(shout_dict['body'])
- remark = {
- "shout": shout_dict['id'],
- "body": extract_md(
- html2text(entry['body']),
- shout_dict
- ),
- "kind": ReactionKind.REMARK
- }
-
- if entry.get('textBefore'):
- remark['range'] = str(
- shout_dict['body']
- .index(
- entry['textBefore'] or ''
- )
- ) + ':' + str(
- shout_dict['body']
- .index(
- entry['textAfter'] or ''
- ) + len(
- entry['textAfter'] or ''
- )
- )
-
- with local_session() as session:
- rmrk = Reaction.create(**remark)
- session.commit()
- del rmrk["_sa_instance_state"]
- return rmrk
- return
+# def migrate(entry, storage):
+# post_oid = entry["contentItem"]
+# print(post_oid)
+# shout_dict = storage["shouts"]["by_oid"].get(post_oid)
+# if shout_dict:
+# print(shout_dict["body"])
+# remark = {
+# "shout": shout_dict["id"],
+# "body": extract_md(html2text(entry["body"]), shout_dict),
+# "kind": ReactionKind.REMARK,
+# }
+#
+# if entry.get("textBefore"):
+# remark["range"] = (
+# str(shout_dict["body"].index(entry["textBefore"] or ""))
+# + ":"
+# + str(
+# shout_dict["body"].index(entry["textAfter"] or "")
+# + len(entry["textAfter"] or "")
+# )
+# )
+#
+# with local_session() as session:
+# rmrk = Reaction.create(**remark)
+# session.commit()
+# del rmrk["_sa_instance_state"]
+# return rmrk
+# return
diff --git a/migration/tables/topics.py b/migration/tables/topics.py
index 17804376..e0e7e7a4 100644
--- a/migration/tables/topics.py
+++ b/migration/tables/topics.py
@@ -1,5 +1,4 @@
from base.orm import local_session
-from migration.extract import extract_md
from migration.html2text import html2text
from orm import Topic
@@ -10,7 +9,7 @@ def migrate(entry):
"slug": entry["slug"],
"oid": entry["_id"],
"title": entry["title"].replace(" ", " "),
- "body": extract_md(html2text(body_orig))
+ "body": html2text(body_orig),
}
with local_session() as session:
diff --git a/migration/tables/users.py b/migration/tables/users.py
index 3ccf9029..300c2e03 100644
--- a/migration/tables/users.py
+++ b/migration/tables/users.py
@@ -8,7 +8,7 @@ from base.orm import local_session
from orm.user import AuthorFollower, User, UserRating
-def migrate(entry):
+def migrate(entry): # noqa: C901
if "subscribedTo" in entry:
del entry["subscribedTo"]
email = entry["emails"][0]["address"]
@@ -23,7 +23,7 @@ def migrate(entry):
"muted": False, # amnesty
"links": [],
"name": "anonymous",
- "password": entry["services"]["password"].get("bcrypt")
+ "password": entry["services"]["password"].get("bcrypt"),
}
if "updatedAt" in entry:
@@ -33,9 +33,13 @@ def migrate(entry):
if entry.get("profile"):
# slug
slug = entry["profile"].get("path").lower()
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug).strip()
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug).strip()
user_dict["slug"] = slug
- bio = (entry.get("profile", {"bio": ""}).get("bio") or "").replace('\(', '(').replace('\)', ')')
+ bio = (
+ (entry.get("profile", {"bio": ""}).get("bio") or "")
+ .replace(r"\(", "(")
+ .replace(r"\)", ")")
+ )
bio_text = BeautifulSoup(bio, features="lxml").text
if len(bio_text) > 120:
@@ -46,8 +50,7 @@ def migrate(entry):
# userpic
try:
user_dict["userpic"] = (
- "https://images.discours.io/unsafe/"
- + entry["profile"]["thumborId"]
+ "https://images.discours.io/unsafe/" + entry["profile"]["thumborId"]
)
except KeyError:
try:
@@ -62,11 +65,7 @@ def migrate(entry):
name = (name + " " + ln) if ln else name
if not name:
name = slug if slug else "anonymous"
- name = (
- entry["profile"]["path"].lower().strip().replace(" ", "-")
- if len(name) < 2
- else name
- )
+ name = entry["profile"]["path"].lower().strip().replace(" ", "-") if len(name) < 2 else name
user_dict["name"] = name
# links
@@ -95,9 +94,7 @@ def migrate(entry):
except IntegrityError:
print("[migration] cannot create user " + user_dict["slug"])
with local_session() as session:
- old_user = (
- session.query(User).filter(User.slug == user_dict["slug"]).first()
- )
+ old_user = session.query(User).filter(User.slug == user_dict["slug"]).first()
old_user.oid = oid
old_user.password = user_dict["password"]
session.commit()
@@ -114,7 +111,7 @@ def post_migrate():
"slug": "old-discours",
"username": "old-discours",
"email": "old@discours.io",
- "name": "Просмотры на старой версии сайта"
+ "name": "Просмотры на старой версии сайта",
}
with local_session() as session:
@@ -147,12 +144,8 @@ def migrate_2stage(entry, id_map):
}
user_rating = UserRating.create(**user_rating_dict)
- if user_rating_dict['value'] > 0:
- af = AuthorFollower.create(
- author=user.id,
- follower=rater.id,
- auto=True
- )
+ if user_rating_dict["value"] > 0:
+ af = AuthorFollower.create(author=user.id, follower=rater.id, auto=True)
session.add(af)
session.add(user_rating)
session.commit()
diff --git a/orm/__init__.py b/orm/__init__.py
index 53b13951..9f66f85c 100644
--- a/orm/__init__.py
+++ b/orm/__init__.py
@@ -1,7 +1,7 @@
from base.orm import Base, engine
from orm.community import Community
from orm.notification import Notification
-from orm.rbac import Operation, Resource, Permission, Role
+from orm.rbac import Operation, Permission, Resource, Role
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic, TopicFollower
@@ -32,5 +32,5 @@ __all__ = [
"Notification",
"Reaction",
"UserRating",
- "init_tables"
+ "init_tables",
]
diff --git a/orm/collection.py b/orm/collection.py
index c9975b62..8493844c 100644
--- a/orm/collection.py
+++ b/orm/collection.py
@@ -8,7 +8,7 @@ from base.orm import Base
class ShoutCollection(Base):
__tablename__ = "shout_collection"
- id = None # type: ignore
+ id = None
shout = Column(ForeignKey("shout.id"), primary_key=True)
collection = Column(ForeignKey("collection.id"), primary_key=True)
diff --git a/orm/community.py b/orm/community.py
index b55b857f..4cbfcc7a 100644
--- a/orm/community.py
+++ b/orm/community.py
@@ -1,18 +1,17 @@
from datetime import datetime
-from sqlalchemy import Column, String, ForeignKey, DateTime
+from sqlalchemy import Column, DateTime, ForeignKey, String
+
from base.orm import Base, local_session
class CommunityFollower(Base):
__tablename__ = "community_followers"
- id = None # type: ignore
- follower = Column(ForeignKey("user.id"), primary_key=True)
- community = Column(ForeignKey("community.id"), primary_key=True)
- joinedAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ id = None
+ follower: Column = Column(ForeignKey("user.id"), primary_key=True)
+ community: Column = Column(ForeignKey("community.id"), primary_key=True)
+ joinedAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
# role = Column(ForeignKey(Role.id), nullable=False, comment="Role for member")
@@ -23,19 +22,15 @@ class Community(Base):
slug = Column(String, nullable=False, unique=True, comment="Slug")
desc = Column(String, nullable=False, default="")
pic = Column(String, nullable=False, default="")
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
@staticmethod
def init_table():
with local_session() as session:
- d = (
- session.query(Community).filter(Community.slug == "discours").first()
- )
+ d = session.query(Community).filter(Community.slug == "discours").first()
if not d:
d = Community.create(name="Дискурс", slug="discours")
session.add(d)
session.commit()
Community.default_community = d
- print('[orm] default community id: %s' % d.id)
+ print("[orm] default community id: %s" % d.id)
diff --git a/orm/notification.py b/orm/notification.py
index 25f4e4f3..429f07f2 100644
--- a/orm/notification.py
+++ b/orm/notification.py
@@ -1,9 +1,10 @@
from datetime import datetime
-from sqlalchemy import Column, Enum, ForeignKey, DateTime, Boolean, Integer
+from enum import Enum as Enumeration
+
+from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer
from sqlalchemy.dialects.postgresql import JSONB
from base.orm import Base
-from enum import Enum as Enumeration
class NotificationType(Enumeration):
@@ -14,9 +15,9 @@ class NotificationType(Enumeration):
class Notification(Base):
__tablename__ = "notification"
- shout = Column(ForeignKey("shout.id"), index=True)
- reaction = Column(ForeignKey("reaction.id"), index=True)
- user = Column(ForeignKey("user.id"), index=True)
+ shout: Column = Column(ForeignKey("shout.id"), index=True)
+ reaction: Column = Column(ForeignKey("reaction.id"), index=True)
+ user: Column = Column(ForeignKey("user.id"), index=True)
createdAt = Column(DateTime, nullable=False, default=datetime.now, index=True)
seen = Column(Boolean, nullable=False, default=False, index=True)
type = Column(Enum(NotificationType), nullable=False)
diff --git a/orm/rbac.py b/orm/rbac.py
index 29ade72e..47abfb74 100644
--- a/orm/rbac.py
+++ b/orm/rbac.py
@@ -1,9 +1,9 @@
import warnings
-from sqlalchemy import String, Column, ForeignKey, UniqueConstraint, TypeDecorator
+from sqlalchemy import Column, ForeignKey, String, TypeDecorator, UniqueConstraint
from sqlalchemy.orm import relationship
-from base.orm import Base, REGISTRY, engine, local_session
+from base.orm import REGISTRY, Base, local_session
# Role Based Access Control #
@@ -121,16 +121,23 @@ class Operation(Base):
class Resource(Base):
__tablename__ = "resource"
- resourceClass = Column(
- String, nullable=False, unique=True, comment="Resource class"
- )
+ resourceClass = Column(String, nullable=False, unique=True, comment="Resource class")
name = Column(String, nullable=False, unique=True, comment="Resource name")
# TODO: community = Column(ForeignKey())
@staticmethod
def init_table():
with local_session() as session:
- for res in ["shout", "topic", "reaction", "chat", "message", "invite", "community", "user"]:
+ for res in [
+ "shout",
+ "topic",
+ "reaction",
+ "chat",
+ "message",
+ "invite",
+ "community",
+ "user",
+ ]:
r = session.query(Resource).filter(Resource.name == res).first()
if not r:
r = Resource.create(name=res, resourceClass=res)
@@ -145,29 +152,27 @@ class Permission(Base):
{"extend_existing": True},
)
- role = Column(
- ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role"
- )
- operation = Column(
+ role: Column = Column(ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role")
+ operation: Column = Column(
ForeignKey("operation.id", ondelete="CASCADE"),
nullable=False,
comment="Operation",
)
- resource = Column(
+ resource: Column = Column(
ForeignKey("resource.id", ondelete="CASCADE"),
nullable=False,
comment="Resource",
)
-if __name__ == "__main__":
- Base.metadata.create_all(engine)
- ops = [
- Permission(role=1, operation=1, resource=1),
- Permission(role=1, operation=2, resource=1),
- Permission(role=1, operation=3, resource=1),
- Permission(role=1, operation=4, resource=1),
- Permission(role=2, operation=4, resource=1),
- ]
- global_session.add_all(ops)
- global_session.commit()
+# if __name__ == "__main__":
+# Base.metadata.create_all(engine)
+# ops = [
+# Permission(role=1, operation=1, resource=1),
+# Permission(role=1, operation=2, resource=1),
+# Permission(role=1, operation=3, resource=1),
+# Permission(role=1, operation=4, resource=1),
+# Permission(role=2, operation=4, resource=1),
+# ]
+# global_session.add_all(ops)
+# global_session.commit()
diff --git a/orm/reaction.py b/orm/reaction.py
index 1c129e23..38520b72 100644
--- a/orm/reaction.py
+++ b/orm/reaction.py
@@ -27,16 +27,18 @@ class ReactionKind(Enumeration):
class Reaction(Base):
__tablename__ = "reaction"
body = Column(String, nullable=True, comment="Reaction Body")
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
- createdBy = Column(ForeignKey("user.id"), nullable=False, index=True, comment="Sender")
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdBy: Column = Column(ForeignKey("user.id"), nullable=False, index=True, comment="Sender")
updatedAt = Column(DateTime, nullable=True, comment="Updated at")
- updatedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Last Editor")
+ updatedBy: Column = Column(
+ ForeignKey("user.id"), nullable=True, index=True, comment="Last Editor"
+ )
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
- deletedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Deleted by")
- shout = Column(ForeignKey("shout.id"), nullable=False, index=True)
- replyTo = Column(
+ deletedBy: Column = Column(
+ ForeignKey("user.id"), nullable=True, index=True, comment="Deleted by"
+ )
+ shout: Column = Column(ForeignKey("shout.id"), nullable=False, index=True)
+ replyTo: Column = Column(
ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID"
)
range = Column(String, nullable=True, comment="Range in format
:")
diff --git a/orm/shout.py b/orm/shout.py
index 22381d4c..b1300ec6 100644
--- a/orm/shout.py
+++ b/orm/shout.py
@@ -1,6 +1,6 @@
from datetime import datetime
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, JSON
+from sqlalchemy import JSON, Boolean, Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import column_property, relationship
from base.orm import Base, local_session
@@ -12,31 +12,29 @@ from orm.user import User
class ShoutTopic(Base):
__tablename__ = "shout_topic"
- id = None # type: ignore
- shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
- topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
+ id = None
+ shout: Column = Column(ForeignKey("shout.id"), primary_key=True, index=True)
+ topic: Column = Column(ForeignKey("topic.id"), primary_key=True, index=True)
class ShoutReactionsFollower(Base):
__tablename__ = "shout_reactions_followers"
- id = None # type: ignore
- follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
- shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
+ id = None
+ follower: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
+ shout: Column = Column(ForeignKey("shout.id"), primary_key=True, index=True)
auto = Column(Boolean, nullable=False, default=False)
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
deletedAt = Column(DateTime, nullable=True)
class ShoutAuthor(Base):
__tablename__ = "shout_author"
- id = None # type: ignore
- shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
- user = Column(ForeignKey("user.id"), primary_key=True, index=True)
- caption = Column(String, nullable=True, default="")
+ id = None
+ shout: Column = Column(ForeignKey("shout.id"), primary_key=True, index=True)
+ user: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
+ caption: Column = Column(String, nullable=True, default="")
class Shout(Base):
@@ -48,8 +46,8 @@ class Shout(Base):
publishedAt = Column(DateTime, nullable=True)
deletedAt = Column(DateTime, nullable=True)
- createdBy = Column(ForeignKey("user.id"), comment="Created By")
- deletedBy = Column(ForeignKey("user.id"), nullable=True)
+ createdBy: Column = Column(ForeignKey("user.id"), comment="Created By")
+ deletedBy: Column = Column(ForeignKey("user.id"), nullable=True)
slug = Column(String, unique=True)
cover = Column(String, nullable=True, comment="Cover image url")
@@ -71,11 +69,11 @@ class Shout(Base):
reactions = relationship(lambda: Reaction)
# TODO: these field should be used or modified
- community = Column(ForeignKey("community.id"), default=1)
- lang = Column(String, nullable=False, default='ru', comment="Language")
- mainTopic = Column(ForeignKey("topic.slug"), nullable=True)
+ community: Column = Column(ForeignKey("community.id"), default=1)
+ lang = Column(String, nullable=False, default="ru", comment="Language")
+ mainTopic: Column = Column(ForeignKey("topic.slug"), nullable=True)
visibility = Column(String, nullable=True) # owner authors community public
- versionOf = Column(ForeignKey("shout.id"), nullable=True)
+ versionOf: Column = Column(ForeignKey("shout.id"), nullable=True)
oid = Column(String, nullable=True)
@staticmethod
@@ -83,12 +81,7 @@ class Shout(Base):
with local_session() as session:
s = session.query(Shout).first()
if not s:
- entry = {
- "slug": "genesis-block",
- "body": "",
- "title": "Ничего",
- "lang": "ru"
- }
+ entry = {"slug": "genesis-block", "body": "", "title": "Ничего", "lang": "ru"}
s = Shout.create(**entry)
session.add(s)
session.commit()
diff --git a/orm/topic.py b/orm/topic.py
index a37dc69a..375d5834 100644
--- a/orm/topic.py
+++ b/orm/topic.py
@@ -8,12 +8,10 @@ from base.orm import Base
class TopicFollower(Base):
__tablename__ = "topic_followers"
- id = None # type: ignore
- follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
- topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ id = None
+ follower: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
+ topic: Column = Column(ForeignKey("topic.id"), primary_key=True, index=True)
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
auto = Column(Boolean, nullable=False, default=False)
@@ -24,7 +22,5 @@ class Topic(Base):
title = Column(String, nullable=False, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
- community = Column(
- ForeignKey("community.id"), default=1, comment="Community"
- )
+ community: Column = Column(ForeignKey("community.id"), default=1, comment="Community")
oid = Column(String, nullable=True, comment="Old ID")
diff --git a/orm/user.py b/orm/user.py
index 5aeab90e..5379b586 100644
--- a/orm/user.py
+++ b/orm/user.py
@@ -3,6 +3,7 @@ from datetime import datetime
from sqlalchemy import JSON as JSONType
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
+
from base.orm import Base, local_session
from orm.rbac import Role
@@ -10,10 +11,10 @@ from orm.rbac import Role
class UserRating(Base):
__tablename__ = "user_rating"
- id = None # type: ignore
- rater = Column(ForeignKey("user.id"), primary_key=True, index=True)
- user = Column(ForeignKey("user.id"), primary_key=True, index=True)
- value = Column(Integer)
+ id = None
+ rater: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
+ user: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
+ value: Column = Column(Integer)
@staticmethod
def init_table():
@@ -23,7 +24,7 @@ class UserRating(Base):
class UserRole(Base):
__tablename__ = "user_role"
- id = None # type: ignore
+ id = None
user = Column(ForeignKey("user.id"), primary_key=True, index=True)
role = Column(ForeignKey("role.id"), primary_key=True, index=True)
@@ -31,12 +32,10 @@ class UserRole(Base):
class AuthorFollower(Base):
__tablename__ = "author_follower"
- id = None # type: ignore
- follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
- author = Column(ForeignKey("user.id"), primary_key=True, index=True)
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
+ id = None
+ follower: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
+ author: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
auto = Column(Boolean, nullable=False, default=False)
@@ -54,12 +53,8 @@ class User(Base):
slug = Column(String, unique=True, comment="User's slug")
muted = Column(Boolean, default=False)
emailConfirmed = Column(Boolean, default=False)
- createdAt = Column(
- DateTime, nullable=False, default=datetime.now, comment="Created at"
- )
- lastSeen = Column(
- DateTime, nullable=False, default=datetime.now, comment="Was online at"
- )
+ createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ lastSeen = Column(DateTime, nullable=False, default=datetime.now, comment="Was online at")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
links = Column(JSONType, nullable=True, comment="Links")
oauth = Column(String, nullable=True)
@@ -103,4 +98,4 @@ class User(Base):
# if __name__ == "__main__":
-# print(User.get_permission(user_id=1)) # type: ignore
+# print(User.get_permission(user_id=1))
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..aa4949aa
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,2 @@
+[tool.black]
+line-length = 100
diff --git a/requirements-dev.txt b/requirements-dev.txt
index d221f3b0..9ff65109 100755
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,4 +1,8 @@
-isort
-brunette
-flake8
-mypy
+black==23.10.1
+flake8==6.1.0
+gql_schema_codegen==1.0.1
+isort==5.12.0
+mypy==1.6.1
+pre-commit==3.5.0
+pymongo-stubs==0.2.0
+sqlalchemy-stubs==0.4
diff --git a/requirements.txt b/requirements.txt
index edbf46ff..af3eee5f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,40 +1,37 @@
-python-frontmatter~=1.0.0
-aioredis~=2.0.1
aiohttp
+aioredis~=2.0.1
+alembic==1.11.3
ariadne>=0.17.0
-PyYAML>=5.4
-pyjwt>=2.6.0
-starlette~=0.23.1
-sqlalchemy>=1.4.41
-graphql-core>=3.0.3
-gql~=3.4.0
-uvicorn>=0.18.3
-pydantic>=1.10.2
-passlib~=1.7.4
-authlib>=1.1.0
-httpx>=0.23.0
-psycopg2-binary
-transliterate~=1.10.2
-requests~=2.28.1
-bcrypt>=4.0.0
-bson~=0.5.10
-flake8
-DateTime~=4.7
asyncio~=3.4.3
-python-dateutil~=2.8.2
+authlib>=1.1.0
+bcrypt>=4.0.0
beautifulsoup4~=4.11.1
-lxml
-sentry-sdk>=1.14.0
-# sse_starlette
-graphql-ws
-nltk~=3.8.1
-pymystem3~=0.2.0
-transformers~=4.28.1
boto3~=1.28.2
botocore~=1.31.2
-python-multipart~=0.0.6
-alembic==1.11.3
+bson~=0.5.10
+DateTime~=4.7
+gql~=3.4.0
+graphql-core>=3.0.3
+httpx>=0.23.0
+itsdangerous
+lxml
Mako==1.2.4
MarkupSafe==2.1.3
+nltk~=3.8.1
+passlib~=1.7.4
+psycopg2-binary
+pydantic>=1.10.2
+pyjwt>=2.6.0
+pymystem3~=0.2.0
+python-dateutil~=2.8.2
+python-frontmatter~=1.0.0
+python-multipart~=0.0.6
+PyYAML>=5.4
+requests~=2.28.1
+sentry-sdk>=1.14.0
+sqlalchemy>=1.4.41
sse-starlette==1.6.5
-itsdangerous
+starlette~=0.23.1
+transformers~=4.28.1
+transliterate~=1.10.2
+uvicorn>=0.18.3
diff --git a/resetdb.sh b/resetdb.sh
index 39b3b9b2..40ba2e37 100755
--- a/resetdb.sh
+++ b/resetdb.sh
@@ -53,4 +53,3 @@ echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'
-
diff --git a/resolvers/__init__.py b/resolvers/__init__.py
index 5d753ac4..9f4bf0bd 100644
--- a/resolvers/__init__.py
+++ b/resolvers/__init__.py
@@ -1,67 +1,46 @@
+# flake8: noqa
+
from resolvers.auth import (
- login,
- sign_out,
- is_email_used,
- register_by_email,
- confirm_email,
auth_send_link,
+ confirm_email,
get_current_user,
+ is_email_used,
+ login,
+ register_by_email,
+ sign_out,
)
-
-from resolvers.create.migrate import markdown_body
from resolvers.create.editor import create_shout, delete_shout, update_shout
-
-from resolvers.zine.profile import (
- load_authors_by,
- rate_user,
- update_profile,
- get_authors_all
-)
-
-from resolvers.zine.reactions import (
- create_reaction,
- delete_reaction,
- update_reaction,
- reactions_unfollow,
- reactions_follow,
- load_reactions_by
-)
-from resolvers.zine.topics import (
- topic_follow,
- topic_unfollow,
- topics_by_author,
- topics_by_community,
- topics_all,
- get_topic
-)
-
-from resolvers.zine.following import (
- follow,
- unfollow
-)
-
-from resolvers.zine.load import (
- load_shout,
- load_shouts_by
-)
-
-from resolvers.inbox.chats import (
- create_chat,
- delete_chat,
- update_chat
-
-)
+from resolvers.inbox.chats import create_chat, delete_chat, update_chat
+from resolvers.inbox.load import load_chats, load_messages_by, load_recipients
from resolvers.inbox.messages import (
create_message,
delete_message,
+ mark_as_read,
update_message,
- mark_as_read
-)
-from resolvers.inbox.load import (
- load_chats,
- load_messages_by,
- load_recipients
)
from resolvers.inbox.search import search_recipients
-
from resolvers.notifications import load_notifications
+from resolvers.zine.following import follow, unfollow
+from resolvers.zine.load import load_shout, load_shouts_by
+from resolvers.zine.profile import (
+ get_authors_all,
+ load_authors_by,
+ rate_user,
+ update_profile,
+)
+from resolvers.zine.reactions import (
+ create_reaction,
+ delete_reaction,
+ load_reactions_by,
+ reactions_follow,
+ reactions_unfollow,
+ update_reaction,
+)
+from resolvers.zine.topics import (
+ get_topic,
+ topic_follow,
+ topic_unfollow,
+ topics_all,
+ topics_by_author,
+ topics_by_community,
+)
diff --git a/resolvers/auth.py b/resolvers/auth.py
index 17369b7a..4900e6c0 100644
--- a/resolvers/auth.py
+++ b/resolvers/auth.py
@@ -1,24 +1,30 @@
# -*- coding: utf-8 -*-
+import re
from datetime import datetime, timezone
from urllib.parse import quote_plus
from graphql.type import GraphQLResolveInfo
from starlette.responses import RedirectResponse
from transliterate import translit
-import re
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
-from base.exceptions import (BaseHttpException, InvalidPassword, InvalidToken,
- ObjectNotExist, Unauthorized)
+from base.exceptions import (
+ BaseHttpException,
+ InvalidPassword,
+ InvalidToken,
+ ObjectNotExist,
+ Unauthorized,
+)
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Role, User
-from settings import SESSION_TOKEN_HEADER, FRONTEND_URL
+from settings import FRONTEND_URL, SESSION_TOKEN_HEADER
@mutation.field("getSession")
@@ -32,17 +38,14 @@ async def get_current_user(_, info):
user.lastSeen = datetime.now(tz=timezone.utc)
session.commit()
- return {
- "token": token,
- "user": user
- }
+ return {"token": token, "user": user}
@mutation.field("confirmEmail")
async def confirm_email(_, info, token):
"""confirm owning email address"""
try:
- print('[resolvers.auth] confirm email by token')
+ print("[resolvers.auth] confirm email by token")
payload = JWTCodec.decode(token)
user_id = payload.user_id
await TokenStorage.get(f"{user_id}-{payload.username}-{token}")
@@ -53,10 +56,7 @@ async def confirm_email(_, info, token):
user.lastSeen = datetime.now(tz=timezone.utc)
session.add(user)
session.commit()
- return {
- "token": session_token,
- "user": user
- }
+ return {"token": session_token, "user": user}
except InvalidToken as e:
raise InvalidToken(e.message)
except Exception as e:
@@ -68,9 +68,9 @@ async def confirm_email_handler(request):
token = request.path_params["token"] # one time
request.session["token"] = token
res = await confirm_email(None, {}, token)
- print('[resolvers.auth] confirm_email request: %r' % request)
+ print("[resolvers.auth] confirm_email request: %r" % request)
if "error" in res:
- raise BaseHttpException(res['error'])
+ raise BaseHttpException(res["error"])
else:
response = RedirectResponse(url=FRONTEND_URL)
response.set_cookie("token", res["token"]) # session token
@@ -87,22 +87,22 @@ def create_user(user_dict):
def generate_unique_slug(src):
- print('[resolvers.auth] generating slug from: ' + src)
+ print("[resolvers.auth] generating slug from: " + src)
slug = translit(src, "ru", reversed=True).replace(".", "-").lower()
- slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
if slug != src:
- print('[resolvers.auth] translited name: ' + slug)
+ print("[resolvers.auth] translited name: " + slug)
c = 1
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
while user:
user = session.query(User).where(User.slug == slug).first()
- slug = slug + '-' + str(c)
+ slug = slug + "-" + str(c)
c += 1
if not user:
unique_slug = slug
- print('[resolvers.auth] ' + unique_slug)
- return quote_plus(unique_slug.replace('\'', '')).replace('+', '-')
+ print("[resolvers.auth] " + unique_slug)
+ return quote_plus(unique_slug.replace("'", "")).replace("+", "-")
@mutation.field("registerUser")
@@ -117,12 +117,12 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str
slug = generate_unique_slug(name)
user = session.query(User).where(User.slug == slug).first()
if user:
- slug = generate_unique_slug(email.split('@')[0])
+ slug = generate_unique_slug(email.split("@")[0])
user_dict = {
"email": email,
"username": email, # will be used to store phone number or some messenger network id
"name": name,
- "slug": slug
+ "slug": slug,
}
if password:
user_dict["password"] = Password.encode(password)
@@ -172,10 +172,7 @@ async def login(_, info, email: str, password: str = "", lang: str = "ru"):
user = Identity.password(orm_user, password)
session_token = await TokenStorage.create_session(user)
print(f"[auth] user {email} authorized")
- return {
- "token": session_token,
- "user": user
- }
+ return {"token": session_token, "user": user}
except InvalidPassword:
print(f"[auth] {email}: invalid password")
raise InvalidPassword("invalid password") # contains webserver status
diff --git a/resolvers/create/editor.py b/resolvers/create/editor.py
index c81ff404..6266a95b 100644
--- a/resolvers/create/editor.py
+++ b/resolvers/create/editor.py
@@ -18,21 +18,23 @@ async def create_shout(_, info, inp):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
- topics = session.query(Topic).filter(Topic.slug.in_(inp.get('topics', []))).all()
+ topics = session.query(Topic).filter(Topic.slug.in_(inp.get("topics", []))).all()
- new_shout = Shout.create(**{
- "title": inp.get("title"),
- "subtitle": inp.get('subtitle'),
- "lead": inp.get('lead'),
- "description": inp.get('description'),
- "body": inp.get("body", ''),
- "layout": inp.get("layout"),
- "authors": inp.get("authors", []),
- "slug": inp.get("slug"),
- "mainTopic": inp.get("mainTopic"),
- "visibility": "owner",
- "createdBy": auth.user_id
- })
+ new_shout = Shout.create(
+ **{
+ "title": inp.get("title"),
+ "subtitle": inp.get("subtitle"),
+ "lead": inp.get("lead"),
+ "description": inp.get("description"),
+ "body": inp.get("body", ""),
+ "layout": inp.get("layout"),
+ "authors": inp.get("authors", []),
+ "slug": inp.get("slug"),
+ "mainTopic": inp.get("mainTopic"),
+ "visibility": "owner",
+ "createdBy": auth.user_id,
+ }
+ )
for topic in topics:
t = ShoutTopic.create(topic=topic.id, shout=new_shout.id)
@@ -60,14 +62,19 @@ async def create_shout(_, info, inp):
@mutation.field("updateShout")
@login_required
-async def update_shout(_, info, shout_id, shout_input=None, publish=False):
+async def update_shout(_, info, shout_id, shout_input=None, publish=False): # noqa: C901
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
- shout = session.query(Shout).options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- ).filter(Shout.id == shout_id).first()
+ shout = (
+ session.query(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ )
+ .filter(Shout.id == shout_id)
+ .first()
+ )
if not shout:
return {"error": "shout not found"}
@@ -94,25 +101,34 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
session.commit()
for new_topic_to_link in new_topics_to_link:
- created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=new_topic_to_link.id)
+ created_unlinked_topic = ShoutTopic.create(
+ shout=shout.id, topic=new_topic_to_link.id
+ )
session.add(created_unlinked_topic)
- existing_topics_input = [topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0]
- existing_topic_to_link_ids = [existing_topic_input["id"] for existing_topic_input in existing_topics_input
- if existing_topic_input["id"] not in [topic.id for topic in shout.topics]]
+ existing_topics_input = [
+ topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0
+ ]
+ existing_topic_to_link_ids = [
+ existing_topic_input["id"]
+ for existing_topic_input in existing_topics_input
+ if existing_topic_input["id"] not in [topic.id for topic in shout.topics]
+ ]
for existing_topic_to_link_id in existing_topic_to_link_ids:
- created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=existing_topic_to_link_id)
+ created_unlinked_topic = ShoutTopic.create(
+ shout=shout.id, topic=existing_topic_to_link_id
+ )
session.add(created_unlinked_topic)
- topic_to_unlink_ids = [topic.id for topic in shout.topics
- if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]]
+ topic_to_unlink_ids = [
+ topic.id
+ for topic in shout.topics
+ if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]
+ ]
shout_topics_to_remove = session.query(ShoutTopic).filter(
- and_(
- ShoutTopic.shout == shout.id,
- ShoutTopic.topic.in_(topic_to_unlink_ids)
- )
+ and_(ShoutTopic.shout == shout.id, ShoutTopic.topic.in_(topic_to_unlink_ids))
)
for shout_topic_to_remove in shout_topics_to_remove:
@@ -120,13 +136,13 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
shout_input["mainTopic"] = shout_input["mainTopic"]["slug"]
- if shout_input["mainTopic"] == '':
+ if shout_input["mainTopic"] == "":
del shout_input["mainTopic"]
shout.update(shout_input)
updated = True
- if publish and shout.visibility == 'owner':
+ if publish and shout.visibility == "owner":
shout.visibility = "community"
shout.publishedAt = datetime.now(tz=timezone.utc)
updated = True
diff --git a/resolvers/create/migrate.py b/resolvers/create/migrate.py
deleted file mode 100644
index f16341f0..00000000
--- a/resolvers/create/migrate.py
+++ /dev/null
@@ -1,11 +0,0 @@
-
-from base.resolvers import query
-from resolvers.auth import login_required
-from migration.extract import extract_md
-
-
-@login_required
-@query.field("markdownBody")
-def markdown_body(_, info, body: str):
- body = extract_md(body)
- return body
diff --git a/resolvers/inbox/chats.py b/resolvers/inbox/chats.py
index 853defab..1a246b1c 100644
--- a/resolvers/inbox/chats.py
+++ b/resolvers/inbox/chats.py
@@ -24,27 +24,24 @@ async def update_chat(_, info, chat_new: Chat):
chat_id = chat_new["id"]
chat = await redis.execute("GET", f"chats/{chat_id}")
if not chat:
- return {
- "error": "chat not exist"
- }
+ return {"error": "chat not exist"}
chat = dict(json.loads(chat))
# TODO
if auth.user_id in chat["admins"]:
- chat.update({
- "title": chat_new.get("title", chat["title"]),
- "description": chat_new.get("description", chat["description"]),
- "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
- "admins": chat_new.get("admins", chat.get("admins") or []),
- "users": chat_new.get("users", chat["users"])
- })
+ chat.update(
+ {
+ "title": chat_new.get("title", chat["title"]),
+ "description": chat_new.get("description", chat["description"]),
+ "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
+ "admins": chat_new.get("admins", chat.get("admins") or []),
+ "users": chat_new.get("users", chat["users"]),
+ }
+ )
await redis.execute("SET", f"chats/{chat.id}", json.dumps(chat))
await redis.execute("COMMIT")
- return {
- "error": None,
- "chat": chat
- }
+ return {"error": None, "chat": chat}
@mutation.field("createChat")
@@ -52,7 +49,7 @@ async def update_chat(_, info, chat_new: Chat):
async def create_chat(_, info, title="", members=[]):
auth: AuthCredentials = info.context["request"].auth
chat = {}
- print('create_chat members: %r' % members)
+ print("create_chat members: %r" % members)
if auth.user_id not in members:
members.append(int(auth.user_id))
@@ -74,15 +71,12 @@ async def create_chat(_, info, title="", members=[]):
chat = await redis.execute("GET", f"chats/{c.decode('utf-8')}")
if chat:
chat = json.loads(chat)
- if chat['title'] == "":
- print('[inbox] createChat found old chat')
+ if chat["title"] == "":
+ print("[inbox] createChat found old chat")
print(chat)
break
if chat:
- return {
- "chat": chat,
- "error": "existed"
- }
+ return {"chat": chat, "error": "existed"}
chat_id = str(uuid.uuid4())
chat = {
@@ -92,7 +86,7 @@ async def create_chat(_, info, title="", members=[]):
"createdBy": auth.user_id,
"createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
"updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
- "admins": members if (len(members) == 2 and title == "") else []
+ "admins": members if (len(members) == 2 and title == "") else [],
}
for m in members:
@@ -100,10 +94,7 @@ async def create_chat(_, info, title="", members=[]):
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0))
await redis.execute("COMMIT")
- return {
- "error": None,
- "chat": chat
- }
+ return {"error": None, "chat": chat}
@mutation.field("deleteChat")
@@ -114,11 +105,9 @@ async def delete_chat(_, info, chat_id: str):
chat = await redis.execute("GET", f"/chats/{chat_id}")
if chat:
chat = dict(json.loads(chat))
- if auth.user_id in chat['admins']:
+ if auth.user_id in chat["admins"]:
await redis.execute("DEL", f"chats/{chat_id}")
await redis.execute("SREM", "chats_by_user/" + str(auth.user_id), chat_id)
await redis.execute("COMMIT")
else:
- return {
- "error": "chat not exist"
- }
+ return {"error": "chat not exist"}
diff --git a/resolvers/inbox/load.py b/resolvers/inbox/load.py
index a0d41721..4322da11 100644
--- a/resolvers/inbox/load.py
+++ b/resolvers/inbox/load.py
@@ -1,28 +1,27 @@
import json
-# from datetime import datetime, timedelta, timezone
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.redis import redis
from base.orm import local_session
+from base.redis import redis
from base.resolvers import query
from orm.user import User
from resolvers.zine.profile import followed_authors
+
from .unread import get_unread_counter
+# from datetime import datetime, timedelta, timezone
+
async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
- ''' load :limit messages for :chat_id with :offset '''
+ """load :limit messages for :chat_id with :offset"""
messages = []
message_ids = []
if ids:
message_ids += ids
try:
if limit:
- mids = await redis.lrange(f"chats/{chat_id}/message_ids",
- offset,
- offset + limit
- )
+ mids = await redis.lrange(f"chats/{chat_id}/message_ids", offset, offset + limit)
mids = [mid.decode("utf-8") for mid in mids]
message_ids += mids
except Exception as e:
@@ -30,10 +29,10 @@ async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
if message_ids:
message_keys = [f"chats/{chat_id}/messages/{mid}" for mid in message_ids]
messages = await redis.mget(*message_keys)
- messages = [json.loads(msg.decode('utf-8')) for msg in messages]
+ messages = [json.loads(msg.decode("utf-8")) for msg in messages]
replies = []
for m in messages:
- rt = m.get('replyTo')
+ rt = m.get("replyTo")
if rt:
rt = int(rt)
if rt not in message_ids:
@@ -46,14 +45,14 @@ async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
@query.field("loadChats")
@login_required
async def load_chats(_, info, limit: int = 50, offset: int = 0):
- """ load :limit chats of current user with :offset """
+ """load :limit chats of current user with :offset"""
auth: AuthCredentials = info.context["request"].auth
cids = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
if cids:
- cids = list(cids)[offset:offset + limit]
+ cids = list(cids)[offset : offset + limit]
if not cids:
- print('[inbox.load] no chats were found')
+ print("[inbox.load] no chats were found")
cids = []
onliners = await redis.execute("SMEMBERS", "users-online")
if not onliners:
@@ -64,62 +63,50 @@ async def load_chats(_, info, limit: int = 50, offset: int = 0):
c = await redis.execute("GET", "chats/" + cid)
if c:
c = dict(json.loads(c))
- c['messages'] = await load_messages(cid, 5, 0)
- c['unread'] = await get_unread_counter(cid, auth.user_id)
+ c["messages"] = await load_messages(cid, 5, 0)
+ c["unread"] = await get_unread_counter(cid, auth.user_id)
with local_session() as session:
- c['members'] = []
+ c["members"] = []
for uid in c["users"]:
a = session.query(User).where(User.id == uid).first()
if a:
- c['members'].append({
- "id": a.id,
- "slug": a.slug,
- "userpic": a.userpic,
- "name": a.name,
- "lastSeen": a.lastSeen,
- "online": a.id in onliners
- })
+ c["members"].append(
+ {
+ "id": a.id,
+ "slug": a.slug,
+ "userpic": a.userpic,
+ "name": a.name,
+ "lastSeen": a.lastSeen,
+ "online": a.id in onliners,
+ }
+ )
chats.append(c)
- return {
- "chats": chats,
- "error": None
- }
+ return {"chats": chats, "error": None}
@query.field("loadMessagesBy")
@login_required
async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
- ''' load :limit messages of :chat_id with :offset '''
+ """load :limit messages of :chat_id with :offset"""
auth: AuthCredentials = info.context["request"].auth
userchats = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
- userchats = [c.decode('utf-8') for c in userchats]
+ userchats = [c.decode("utf-8") for c in userchats]
# print('[inbox] userchats: %r' % userchats)
if userchats:
# print('[inbox] loading messages by...')
messages = []
- by_chat = by.get('chat')
+ by_chat = by.get("chat")
if by_chat in userchats:
chat = await redis.execute("GET", f"chats/{by_chat}")
# print(chat)
if not chat:
- return {
- "messages": [],
- "error": "chat not exist"
- }
+ return {"messages": [], "error": "chat not exist"}
# everyone's messages in filtered chat
messages = await load_messages(by_chat, limit, offset)
- return {
- "messages": sorted(
- list(messages),
- key=lambda m: m['createdAt']
- ),
- "error": None
- }
+ return {"messages": sorted(list(messages), key=lambda m: m["createdAt"]), "error": None}
else:
- return {
- "error": "Cannot access messages of this chat"
- }
+ return {"error": "Cannot access messages of this chat"}
@query.field("loadRecipients")
@@ -138,15 +125,14 @@ async def load_recipients(_, info, limit=50, offset=0):
chat_users += session.query(User).where(User.emailConfirmed).limit(limit).offset(offset)
members = []
for a in chat_users:
- members.append({
- "id": a.id,
- "slug": a.slug,
- "userpic": a.userpic,
- "name": a.name,
- "lastSeen": a.lastSeen,
- "online": a.id in onliners
- })
- return {
- "members": members,
- "error": None
- }
+ members.append(
+ {
+ "id": a.id,
+ "slug": a.slug,
+ "userpic": a.userpic,
+ "name": a.name,
+ "lastSeen": a.lastSeen,
+ "online": a.id in onliners,
+ }
+ )
+ return {"members": members, "error": None}
diff --git a/resolvers/inbox/messages.py b/resolvers/inbox/messages.py
index 56187edf..c4d36c48 100644
--- a/resolvers/inbox/messages.py
+++ b/resolvers/inbox/messages.py
@@ -1,62 +1,54 @@
-import asyncio
import json
-from typing import Any
from datetime import datetime, timezone
-from graphql.type import GraphQLResolveInfo
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
-from services.following import FollowingManager, FollowingResult, Following
-from validations.inbox import Message
+from services.following import FollowingManager, FollowingResult
@mutation.field("createMessage")
@login_required
async def create_message(_, info, chat: str, body: str, replyTo=None):
- """ create message with :body for :chat_id replying to :replyTo optionally """
+ """create message with :body for :chat_id replying to :replyTo optionally"""
auth: AuthCredentials = info.context["request"].auth
chat = await redis.execute("GET", f"chats/{chat}")
if not chat:
- return {
- "error": "chat is not exist"
- }
+ return {"error": "chat is not exist"}
else:
- chat = dict(json.loads(chat))
- message_id = await redis.execute("GET", f"chats/{chat['id']}/next_message_id")
+ chat_dict = dict(json.loads(chat))
+ message_id = await redis.execute("GET", f"chats/{chat_dict['id']}/next_message_id")
message_id = int(message_id)
new_message = {
- "chatId": chat['id'],
+ "chatId": chat_dict["id"],
"id": message_id,
"author": auth.user_id,
"body": body,
- "createdAt": int(datetime.now(tz=timezone.utc).timestamp())
+ "createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
}
if replyTo:
- new_message['replyTo'] = replyTo
- chat['updatedAt'] = new_message['createdAt']
- await redis.execute("SET", f"chats/{chat['id']}", json.dumps(chat))
+ new_message["replyTo"] = replyTo
+ chat_dict["updatedAt"] = new_message["createdAt"]
+ await redis.execute("SET", f"chats/{chat_dict['id']}", json.dumps(chat))
print(f"[inbox] creating message {new_message}")
await redis.execute(
- "SET", f"chats/{chat['id']}/messages/{message_id}", json.dumps(new_message)
+ "SET", f"chats/{chat_dict['id']}/messages/{message_id}", json.dumps(new_message)
)
- await redis.execute("LPUSH", f"chats/{chat['id']}/message_ids", str(message_id))
- await redis.execute("SET", f"chats/{chat['id']}/next_message_id", str(message_id + 1))
+ await redis.execute("LPUSH", f"chats/{chat_dict['id']}/message_ids", str(message_id))
+ await redis.execute("SET", f"chats/{chat_dict['id']}/next_message_id", str(message_id + 1))
- users = chat["users"]
+ users = chat_dict["users"]
for user_slug in users:
await redis.execute(
- "LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id)
+ "LPUSH", f"chats/{chat_dict['id']}/unread/{user_slug}", str(message_id)
)
- result = FollowingResult("NEW", 'chat', new_message)
- await FollowingManager.push('chat', result)
+ result = FollowingResult("NEW", "chat", new_message)
+ await FollowingManager.push("chat", result)
- return {
- "message": new_message,
- "error": None
- }
+ return {"message": new_message, "error": None}
@mutation.field("updateMessage")
@@ -81,13 +73,10 @@ async def update_message(_, info, chat_id: str, message_id: int, body: str):
await redis.execute("SET", f"chats/{chat_id}/messages/{message_id}", json.dumps(message))
- result = FollowingResult("UPDATED", 'chat', message)
- await FollowingManager.push('chat', result)
+ result = FollowingResult("UPDATED", "chat", message)
+ await FollowingManager.push("chat", result)
- return {
- "message": message,
- "error": None
- }
+ return {"message": message, "error": None}
@mutation.field("deleteMessage")
@@ -114,7 +103,7 @@ async def delete_message(_, info, chat_id: str, message_id: int):
for user_id in users:
await redis.execute("LREM", f"chats/{chat_id}/unread/{user_id}", 0, str(message_id))
- result = FollowingResult("DELETED", 'chat', message)
+ result = FollowingResult("DELETED", "chat", message)
await FollowingManager.push(result)
return {}
@@ -137,6 +126,4 @@ async def mark_as_read(_, info, chat_id: str, messages: [int]):
for message_id in messages:
await redis.execute("LREM", f"chats/{chat_id}/unread/{auth.user_id}", 0, str(message_id))
- return {
- "error": None
- }
+ return {"error": None}
diff --git a/resolvers/inbox/search.py b/resolvers/inbox/search.py
index 1ca340e5..6b9a5f1a 100644
--- a/resolvers/inbox/search.py
+++ b/resolvers/inbox/search.py
@@ -1,10 +1,11 @@
import json
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timedelta, timezone
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
+from base.orm import local_session
from base.redis import redis
from base.resolvers import query
-from base.orm import local_session
from orm.user import AuthorFollower, User
from resolvers.inbox.load import load_messages
@@ -17,7 +18,7 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int =
auth: AuthCredentials = info.context["request"].auth
talk_before = await redis.execute("GET", f"/chats_by_user/{auth.user_id}")
if talk_before:
- talk_before = list(json.loads(talk_before))[offset:offset + limit]
+ talk_before = list(json.loads(talk_before))[offset : offset + limit]
for chat_id in talk_before:
members = await redis.execute("GET", f"/chats/{chat_id}/users")
if members:
@@ -31,23 +32,24 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int =
with local_session() as session:
# followings
- result += session.query(AuthorFollower.author).join(
- User, User.id == AuthorFollower.follower
- ).where(
- User.slug.startswith(query)
- ).offset(offset + len(result)).limit(more_amount)
+ result += (
+ session.query(AuthorFollower.author)
+ .join(User, User.id == AuthorFollower.follower)
+ .where(User.slug.startswith(query))
+ .offset(offset + len(result))
+ .limit(more_amount)
+ )
more_amount = limit
# followers
- result += session.query(AuthorFollower.follower).join(
- User, User.id == AuthorFollower.author
- ).where(
- User.slug.startswith(query)
- ).offset(offset + len(result)).limit(offset + len(result) + limit)
- return {
- "members": list(result),
- "error": None
- }
+ result += (
+ session.query(AuthorFollower.follower)
+ .join(User, User.id == AuthorFollower.author)
+ .where(User.slug.startswith(query))
+ .offset(offset + len(result))
+ .limit(offset + len(result) + limit)
+ )
+ return {"members": list(result), "error": None}
@query.field("searchMessages")
@@ -57,22 +59,22 @@ async def search_user_chats(by, messages, user_id: int, limit, offset):
cids.union(set(await redis.execute("SMEMBERS", "chats_by_user/" + str(user_id))))
messages = []
- by_author = by.get('author')
+ by_author = by.get("author")
if by_author:
# all author's messages
cids.union(set(await redis.execute("SMEMBERS", f"chats_by_user/{by_author}")))
# author's messages in filtered chat
messages.union(set(filter(lambda m: m["author"] == by_author, list(messages))))
for c in cids:
- c = c.decode('utf-8')
+ c = c.decode("utf-8")
messages = await load_messages(c, limit, offset)
- body_like = by.get('body')
+ body_like = by.get("body")
if body_like:
# search in all messages in all user's chats
for c in cids:
# FIXME: use redis scan here
- c = c.decode('utf-8')
+ c = c.decode("utf-8")
mmm = await load_messages(c, limit, offset)
for m in mmm:
if body_like in m["body"]:
@@ -83,13 +85,12 @@ async def search_user_chats(by, messages, user_id: int, limit, offset):
days = by.get("days")
if days:
- messages.extend(filter(
- list(messages),
- key=lambda m: (
- datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by["days"])
+ messages.extend(
+ filter(
+ list(messages),
+ key=lambda m: (
+ datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by["days"])
+ ),
)
- ))
- return {
- "messages": messages,
- "error": None
- }
+ )
+ return {"messages": messages, "error": None}
diff --git a/resolvers/notifications.py b/resolvers/notifications.py
index 0cfc2244..9bc83c69 100644
--- a/resolvers/notifications.py
+++ b/resolvers/notifications.py
@@ -1,9 +1,9 @@
-from sqlalchemy import select, desc, and_, update
+from sqlalchemy import and_, desc, select, update
-from auth.credentials import AuthCredentials
-from base.resolvers import query, mutation
from auth.authenticate import login_required
+from auth.credentials import AuthCredentials
from base.orm import local_session
+from base.resolvers import mutation, query
from orm import Notification
@@ -16,25 +16,26 @@ async def load_notifications(_, info, params=None):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- limit = params.get('limit', 50)
- offset = params.get('offset', 0)
+ limit = params.get("limit", 50)
+ offset = params.get("offset", 0)
- q = select(Notification).where(
- Notification.user == user_id
- ).order_by(desc(Notification.createdAt)).limit(limit).offset(offset)
+ q = (
+ select(Notification)
+ .where(Notification.user == user_id)
+ .order_by(desc(Notification.createdAt))
+ .limit(limit)
+ .offset(offset)
+ )
notifications = []
with local_session() as session:
- total_count = session.query(Notification).where(
- Notification.user == user_id
- ).count()
+ total_count = session.query(Notification).where(Notification.user == user_id).count()
- total_unread_count = session.query(Notification).where(
- and_(
- Notification.user == user_id,
- Notification.seen == False
- )
- ).count()
+ total_unread_count = (
+ session.query(Notification)
+ .where(and_(Notification.user == user_id, Notification.seen == False)) # noqa: E712
+ .count()
+ )
for [notification] in session.execute(q):
notification.type = notification.type.name
@@ -43,7 +44,7 @@ async def load_notifications(_, info, params=None):
return {
"notifications": notifications,
"totalCount": total_count,
- "totalUnreadCount": total_unread_count
+ "totalUnreadCount": total_unread_count,
}
@@ -54,9 +55,11 @@ async def mark_notification_as_read(_, info, notification_id: int):
user_id = auth.user_id
with local_session() as session:
- notification = session.query(Notification).where(
- and_(Notification.id == notification_id, Notification.user == user_id)
- ).one()
+ notification = (
+ session.query(Notification)
+ .where(and_(Notification.id == notification_id, Notification.user == user_id))
+ .one()
+ )
notification.seen = True
session.commit()
@@ -69,12 +72,11 @@ async def mark_all_notifications_as_read(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- statement = update(Notification).where(
- and_(
- Notification.user == user_id,
- Notification.seen == False
- )
- ).values(seen=True)
+ statement = (
+ update(Notification)
+ .where(and_(Notification.user == user_id, Notification.seen == False)) # noqa: E712
+ .values(seen=True)
+ )
with local_session() as session:
try:
diff --git a/resolvers/upload.py b/resolvers/upload.py
index 44c7b81c..4205f473 100644
--- a/resolvers/upload.py
+++ b/resolvers/upload.py
@@ -2,33 +2,36 @@ import os
import shutil
import tempfile
import uuid
+
import boto3
from botocore.exceptions import BotoCoreError, ClientError
from starlette.responses import JSONResponse
-STORJ_ACCESS_KEY = os.environ.get('STORJ_ACCESS_KEY')
-STORJ_SECRET_KEY = os.environ.get('STORJ_SECRET_KEY')
-STORJ_END_POINT = os.environ.get('STORJ_END_POINT')
-STORJ_BUCKET_NAME = os.environ.get('STORJ_BUCKET_NAME')
-CDN_DOMAIN = os.environ.get('CDN_DOMAIN')
+STORJ_ACCESS_KEY = os.environ.get("STORJ_ACCESS_KEY")
+STORJ_SECRET_KEY = os.environ.get("STORJ_SECRET_KEY")
+STORJ_END_POINT = os.environ.get("STORJ_END_POINT")
+STORJ_BUCKET_NAME = os.environ.get("STORJ_BUCKET_NAME")
+CDN_DOMAIN = os.environ.get("CDN_DOMAIN")
async def upload_handler(request):
form = await request.form()
- file = form.get('file')
+ file = form.get("file")
if file is None:
- return JSONResponse({'error': 'No file uploaded'}, status_code=400)
+ return JSONResponse({"error": "No file uploaded"}, status_code=400)
file_name, file_extension = os.path.splitext(file.filename)
- key = 'files/' + str(uuid.uuid4()) + file_extension
+ key = "files/" + str(uuid.uuid4()) + file_extension
# Create an S3 client with Storj configuration
- s3 = boto3.client('s3',
- aws_access_key_id=STORJ_ACCESS_KEY,
- aws_secret_access_key=STORJ_SECRET_KEY,
- endpoint_url=STORJ_END_POINT)
+ s3 = boto3.client(
+ "s3",
+ aws_access_key_id=STORJ_ACCESS_KEY,
+ aws_secret_access_key=STORJ_SECRET_KEY,
+ endpoint_url=STORJ_END_POINT,
+ )
try:
# Save the uploaded file to a temporary file
@@ -39,18 +42,13 @@ async def upload_handler(request):
Filename=tmp_file.name,
Bucket=STORJ_BUCKET_NAME,
Key=key,
- ExtraArgs={
- "ContentType": file.content_type
- }
+ ExtraArgs={"ContentType": file.content_type},
)
- url = 'https://' + CDN_DOMAIN + '/' + key
+ url = "https://" + CDN_DOMAIN + "/" + key
- return JSONResponse({'url': url, 'originalFilename': file.filename})
+ return JSONResponse({"url": url, "originalFilename": file.filename})
except (BotoCoreError, ClientError) as e:
print(e)
- return JSONResponse({'error': 'Failed to upload file'}, status_code=500)
-
-
-
+ return JSONResponse({"error": "Failed to upload file"}, status_code=500)
diff --git a/resolvers/zine/following.py b/resolvers/zine/following.py
index 99481571..fc3656f8 100644
--- a/resolvers/zine/following.py
+++ b/resolvers/zine/following.py
@@ -1,41 +1,36 @@
-import asyncio
-from base.orm import local_session
-from base.resolvers import mutation
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
+from base.resolvers import mutation
+
# from resolvers.community import community_follow, community_unfollow
-from orm.user import AuthorFollower
-from orm.topic import TopicFollower
-from orm.shout import ShoutReactionsFollower
from resolvers.zine.profile import author_follow, author_unfollow
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
from resolvers.zine.topics import topic_follow, topic_unfollow
-from services.following import Following, FollowingManager, FollowingResult
-from graphql.type import GraphQLResolveInfo
+from services.following import FollowingManager, FollowingResult
@mutation.field("follow")
@login_required
-async def follow(_, info, what, slug):
+async def follow(_, info, what, slug): # noqa: C901
auth: AuthCredentials = info.context["request"].auth
try:
if what == "AUTHOR":
if author_follow(auth.user_id, slug):
- result = FollowingResult("NEW", 'author', slug)
- await FollowingManager.push('author', result)
+ result = FollowingResult("NEW", "author", slug)
+ await FollowingManager.push("author", result)
elif what == "TOPIC":
if topic_follow(auth.user_id, slug):
- result = FollowingResult("NEW", 'topic', slug)
- await FollowingManager.push('topic', result)
+ result = FollowingResult("NEW", "topic", slug)
+ await FollowingManager.push("topic", result)
elif what == "COMMUNITY":
if False: # TODO: use community_follow(auth.user_id, slug):
- result = FollowingResult("NEW", 'community', slug)
- await FollowingManager.push('community', result)
+ result = FollowingResult("NEW", "community", slug)
+ await FollowingManager.push("community", result)
elif what == "REACTIONS":
if reactions_follow(auth.user_id, slug):
- result = FollowingResult("NEW", 'shout', slug)
- await FollowingManager.push('shout', result)
+ result = FollowingResult("NEW", "shout", slug)
+ await FollowingManager.push("shout", result)
except Exception as e:
print(Exception(e))
return {"error": str(e)}
@@ -45,26 +40,26 @@ async def follow(_, info, what, slug):
@mutation.field("unfollow")
@login_required
-async def unfollow(_, info, what, slug):
+async def unfollow(_, info, what, slug): # noqa: C901
auth: AuthCredentials = info.context["request"].auth
try:
if what == "AUTHOR":
if author_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", 'author', slug)
- await FollowingManager.push('author', result)
+ result = FollowingResult("DELETED", "author", slug)
+ await FollowingManager.push("author", result)
elif what == "TOPIC":
if topic_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", 'topic', slug)
- await FollowingManager.push('topic', result)
+ result = FollowingResult("DELETED", "topic", slug)
+ await FollowingManager.push("topic", result)
elif what == "COMMUNITY":
if False: # TODO: use community_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", 'community', slug)
- await FollowingManager.push('community', result)
+ result = FollowingResult("DELETED", "community", slug)
+ await FollowingManager.push("community", result)
elif what == "REACTIONS":
if reactions_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", 'shout', slug)
- await FollowingManager.push('shout', result)
+ result = FollowingResult("DELETED", "shout", slug)
+ await FollowingManager.push("shout", result)
except Exception as e:
return {"error": str(e)}
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 4619efa6..95fac914 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -1,11 +1,11 @@
from datetime import datetime, timedelta, timezone
-from sqlalchemy.orm import joinedload, aliased
-from sqlalchemy.sql.expression import desc, asc, select, func, case, and_, text, nulls_last
+from sqlalchemy.orm import aliased, joinedload
+from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.exceptions import ObjectNotExist, OperationNotAllowed
+from base.exceptions import ObjectNotExist
from base.orm import local_session
from base.resolvers import query
from orm import TopicFollower
@@ -18,37 +18,37 @@ def add_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction).add_columns(
- func.sum(
- aliased_reaction.id
- ).label('reacted_stat'),
+ func.sum(aliased_reaction.id).label("reacted_stat"),
+ func.sum(case((aliased_reaction.kind == ReactionKind.COMMENT, 1), else_=0)).label(
+ "commented_stat"
+ ),
func.sum(
case(
- (aliased_reaction.kind == ReactionKind.COMMENT, 1),
- else_=0
+ # do not count comments' reactions
+ (aliased_reaction.replyTo.is_not(None), 0),
+ (aliased_reaction.kind == ReactionKind.AGREE, 1),
+ (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
+ (aliased_reaction.kind == ReactionKind.PROOF, 1),
+ (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
+ (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
+ (aliased_reaction.kind == ReactionKind.REJECT, -1),
+ (aliased_reaction.kind == ReactionKind.LIKE, 1),
+ (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
+ else_=0,
)
- ).label('commented_stat'),
- func.sum(case(
- # do not count comments' reactions
- (aliased_reaction.replyTo.is_not(None), 0),
- (aliased_reaction.kind == ReactionKind.AGREE, 1),
- (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
- (aliased_reaction.kind == ReactionKind.PROOF, 1),
- (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
- (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
- (aliased_reaction.kind == ReactionKind.REJECT, -1),
- (aliased_reaction.kind == ReactionKind.LIKE, 1),
- (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
- else_=0)
- ).label('rating_stat'),
- func.max(case(
- (aliased_reaction.kind != ReactionKind.COMMENT, None),
- else_=aliased_reaction.createdAt
- )).label('last_comment'))
+ ).label("rating_stat"),
+ func.max(
+ case(
+ (aliased_reaction.kind != ReactionKind.COMMENT, None),
+ else_=aliased_reaction.createdAt,
+ )
+ ).label("last_comment"),
+ )
return q
-def apply_filters(q, filters, user_id=None):
+def apply_filters(q, filters, user_id=None): # noqa: C901
if filters.get("reacted") and user_id:
q.join(Reaction, Reaction.createdBy == user_id)
@@ -60,7 +60,7 @@ def apply_filters(q, filters, user_id=None):
if filters.get("layout"):
q = q.filter(Shout.layout == filters.get("layout"))
- if filters.get('excludeLayout'):
+ if filters.get("excludeLayout"):
q = q.filter(Shout.layout != filters.get("excludeLayout"))
if filters.get("author"):
q = q.filter(Shout.authors.any(slug=filters.get("author")))
@@ -87,27 +87,23 @@ async def load_shout(_, info, slug=None, shout_id=None):
q = add_stat_columns(q)
if slug is not None:
- q = q.filter(
- Shout.slug == slug
- )
+ q = q.filter(Shout.slug == slug)
if shout_id is not None:
- q = q.filter(
- Shout.id == shout_id
- )
+ q = q.filter(Shout.id == shout_id)
- q = q.filter(
- Shout.deletedAt.is_(None)
- ).group_by(Shout.id)
+ q = q.filter(Shout.deletedAt.is_(None)).group_by(Shout.id)
try:
- [shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute(q).first()
+ [shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute(
+ q
+ ).first()
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat
+ "rating": rating_stat,
}
for author_caption in session.query(ShoutAuthor).join(Shout).where(Shout.slug == slug):
@@ -142,14 +138,13 @@ async def load_shouts_by(_, info, options):
:return: Shout[]
"""
- q = select(Shout).options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- ).where(
- and_(
- Shout.deletedAt.is_(None),
- Shout.layout.is_not(None)
+ q = (
+ select(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
)
+ .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None)))
)
q = add_stat_columns(q)
@@ -159,7 +154,7 @@ async def load_shouts_by(_, info, options):
order_by = options.get("order_by", Shout.publishedAt)
- query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
+ query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
@@ -169,13 +164,15 @@ async def load_shouts_by(_, info, options):
with local_session() as session:
shouts_map = {}
- for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
+ for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
+ q
+ ).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat
+ "rating": rating_stat,
}
shouts_map[shout.id] = shout
@@ -188,11 +185,13 @@ async def get_drafts(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- q = select(Shout).options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- ).where(
- and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id)
+ q = (
+ select(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ )
+ .where(and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id))
)
q = q.group_by(Shout.id)
@@ -211,24 +210,22 @@ async def get_my_feed(_, info, options):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- subquery = select(Shout.id).join(
- ShoutAuthor
- ).join(
- AuthorFollower, AuthorFollower.follower == user_id
- ).join(
- ShoutTopic
- ).join(
- TopicFollower, TopicFollower.follower == user_id
+ subquery = (
+ select(Shout.id)
+ .join(ShoutAuthor)
+ .join(AuthorFollower, AuthorFollower.follower == user_id)
+ .join(ShoutTopic)
+ .join(TopicFollower, TopicFollower.follower == user_id)
)
- q = select(Shout).options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- ).where(
- and_(
- Shout.publishedAt.is_not(None),
- Shout.deletedAt.is_(None),
- Shout.id.in_(subquery)
+ q = (
+ select(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ )
+ .where(
+ and_(Shout.publishedAt.is_not(None), Shout.deletedAt.is_(None), Shout.id.in_(subquery))
)
)
@@ -237,7 +234,7 @@ async def get_my_feed(_, info, options):
order_by = options.get("order_by", Shout.publishedAt)
- query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
+ query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
@@ -246,13 +243,15 @@ async def get_my_feed(_, info, options):
shouts = []
with local_session() as session:
shouts_map = {}
- for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
+ for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
+ q
+ ).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat
+ "rating": rating_stat,
}
shouts_map[shout.id] = shout
diff --git a/resolvers/zine/profile.py b/resolvers/zine/profile.py
index 552af43f..5edb1c4b 100644
--- a/resolvers/zine/profile.py
+++ b/resolvers/zine/profile.py
@@ -1,6 +1,7 @@
-from typing import List
from datetime import datetime, timedelta, timezone
-from sqlalchemy import and_, func, distinct, select, literal
+from typing import List
+
+from sqlalchemy import and_, distinct, func, literal, select
from sqlalchemy.orm import aliased, joinedload
from auth.authenticate import login_required
@@ -21,27 +22,27 @@ def add_author_stat_columns(q):
# user_rating_aliased = aliased(UserRating)
q = q.outerjoin(shout_author_aliased).add_columns(
- func.count(distinct(shout_author_aliased.shout)).label('shouts_stat')
+ func.count(distinct(shout_author_aliased.shout)).label("shouts_stat")
)
q = q.outerjoin(author_followers, author_followers.author == User.id).add_columns(
- func.count(distinct(author_followers.follower)).label('followers_stat')
+ func.count(distinct(author_followers.follower)).label("followers_stat")
)
q = q.outerjoin(author_following, author_following.follower == User.id).add_columns(
- func.count(distinct(author_following.author)).label('followings_stat')
+ func.count(distinct(author_following.author)).label("followings_stat")
)
- q = q.add_columns(literal(0).label('rating_stat'))
+ q = q.add_columns(literal(0).label("rating_stat"))
# FIXME
# q = q.outerjoin(user_rating_aliased, user_rating_aliased.user == User.id).add_columns(
# # TODO: check
# func.sum(user_rating_aliased.value).label('rating_stat')
# )
- q = q.add_columns(literal(0).label('commented_stat'))
- # q = q.outerjoin(Reaction, and_(Reaction.createdBy == User.id, Reaction.body.is_not(None))).add_columns(
- # func.count(distinct(Reaction.id)).label('commented_stat')
- # )
+ q = q.add_columns(literal(0).label("commented_stat"))
+ # q = q.outerjoin(
+ # Reaction, and_(Reaction.createdBy == User.id, Reaction.body.is_not(None))
+ # ).add_columns(func.count(distinct(Reaction.id)).label("commented_stat"))
q = q.group_by(User.id)
@@ -55,7 +56,7 @@ def add_stat(author, stat_columns):
"followers": followers_stat,
"followings": followings_stat,
"rating": rating_stat,
- "commented": commented_stat
+ "commented": commented_stat,
}
return author
@@ -119,10 +120,10 @@ async def user_followers(_, _info, slug) -> List[User]:
q = add_author_stat_columns(q)
aliased_user = aliased(User)
- q = q.join(AuthorFollower, AuthorFollower.follower == User.id).join(
- aliased_user, aliased_user.id == AuthorFollower.author
- ).where(
- aliased_user.slug == slug
+ q = (
+ q.join(AuthorFollower, AuthorFollower.follower == User.id)
+ .join(aliased_user, aliased_user.id == AuthorFollower.author)
+ .where(aliased_user.slug == slug)
)
return get_authors_from_query(q)
@@ -150,15 +151,10 @@ async def update_profile(_, info, profile):
with local_session() as session:
user = session.query(User).filter(User.id == user_id).one()
if not user:
- return {
- "error": "canoot find user"
- }
+ return {"error": "canoot find user"}
user.update(profile)
session.commit()
- return {
- "error": None,
- "author": user
- }
+ return {"error": None, "author": user}
@mutation.field("rateUser")
@@ -192,7 +188,8 @@ def author_follow(user_id, slug):
session.add(af)
session.commit()
return True
- except:
+ except Exception as e:
+ print(e)
return False
@@ -200,13 +197,10 @@ def author_follow(user_id, slug):
def author_unfollow(user_id, slug):
with local_session() as session:
flw = (
- session.query(
- AuthorFollower
- ).join(User, User.id == AuthorFollower.author).filter(
- and_(
- AuthorFollower.follower == user_id, User.slug == slug
- )
- ).first()
+ session.query(AuthorFollower)
+ .join(User, User.id == AuthorFollower.author)
+ .filter(and_(AuthorFollower.follower == user_id, User.slug == slug))
+ .first()
)
if flw:
session.delete(flw)
@@ -232,12 +226,11 @@ async def get_author(_, _info, slug):
[author] = get_authors_from_query(q)
with local_session() as session:
- comments_count = session.query(Reaction).where(
- and_(
- Reaction.createdBy == author.id,
- Reaction.kind == ReactionKind.COMMENT
- )
- ).count()
+ comments_count = (
+ session.query(Reaction)
+ .where(and_(Reaction.createdBy == author.id, Reaction.kind == ReactionKind.COMMENT))
+ .count()
+ )
author.stat["commented"] = comments_count
return author
@@ -260,9 +253,7 @@ async def load_authors_by(_, info, by, limit, offset):
days_before = datetime.now(tz=timezone.utc) - timedelta(days=by["createdAt"])
q = q.filter(User.createdAt > days_before)
- q = q.order_by(
- by.get("order", User.createdAt)
- ).limit(limit).offset(offset)
+ q = q.order_by(by.get("order", User.createdAt)).limit(limit).offset(offset)
return get_authors_from_query(q)
@@ -273,13 +264,13 @@ async def load_my_subscriptions(_, info):
auth = info.context["request"].auth
user_id = auth.user_id
- authors_query = select(User).join(AuthorFollower, AuthorFollower.author == User.id).where(
- AuthorFollower.follower == user_id
+ authors_query = (
+ select(User)
+ .join(AuthorFollower, AuthorFollower.author == User.id)
+ .where(AuthorFollower.follower == user_id)
)
- topics_query = select(Topic).join(TopicFollower).where(
- TopicFollower.follower == user_id
- )
+ topics_query = select(Topic).join(TopicFollower).where(TopicFollower.follower == user_id)
topics = []
authors = []
@@ -291,7 +282,4 @@ async def load_my_subscriptions(_, info):
for [topic] in session.execute(topics_query):
topics.append(topic)
- return {
- "topics": topics,
- "authors": authors
- }
+ return {"topics": topics, "authors": authors}
diff --git a/resolvers/zine/reactions.py b/resolvers/zine/reactions.py
index 1c132b69..46059c4e 100644
--- a/resolvers/zine/reactions.py
+++ b/resolvers/zine/reactions.py
@@ -1,5 +1,6 @@
from datetime import datetime, timedelta, timezone
-from sqlalchemy import and_, asc, desc, select, text, func, case
+
+from sqlalchemy import and_, asc, case, desc, func, select, text
from sqlalchemy.orm import aliased
from auth.authenticate import login_required
@@ -17,26 +18,22 @@ def add_reaction_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction, Reaction.id == aliased_reaction.replyTo).add_columns(
- func.sum(
- aliased_reaction.id
- ).label('reacted_stat'),
+ func.sum(aliased_reaction.id).label("reacted_stat"),
+ func.sum(case((aliased_reaction.body.is_not(None), 1), else_=0)).label("commented_stat"),
func.sum(
case(
- (aliased_reaction.body.is_not(None), 1),
- else_=0
+ (aliased_reaction.kind == ReactionKind.AGREE, 1),
+ (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
+ (aliased_reaction.kind == ReactionKind.PROOF, 1),
+ (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
+ (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
+ (aliased_reaction.kind == ReactionKind.REJECT, -1),
+ (aliased_reaction.kind == ReactionKind.LIKE, 1),
+ (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
+ else_=0,
)
- ).label('commented_stat'),
- func.sum(case(
- (aliased_reaction.kind == ReactionKind.AGREE, 1),
- (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
- (aliased_reaction.kind == ReactionKind.PROOF, 1),
- (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
- (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
- (aliased_reaction.kind == ReactionKind.REJECT, -1),
- (aliased_reaction.kind == ReactionKind.LIKE, 1),
- (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
- else_=0)
- ).label('rating_stat'))
+ ).label("rating_stat"),
+ )
return q
@@ -47,22 +44,25 @@ def reactions_follow(user_id, shout_id: int, auto=False):
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
- session.query(ShoutReactionsFollower).where(and_(
- ShoutReactionsFollower.follower == user_id,
- ShoutReactionsFollower.shout == shout.id,
- )).first()
+ session.query(ShoutReactionsFollower)
+ .where(
+ and_(
+ ShoutReactionsFollower.follower == user_id,
+ ShoutReactionsFollower.shout == shout.id,
+ )
+ )
+ .first()
)
if not following:
following = ShoutReactionsFollower.create(
- follower=user_id,
- shout=shout.id,
- auto=auto
+ follower=user_id, shout=shout.id, auto=auto
)
session.add(following)
session.commit()
return True
- except:
+ except Exception as e:
+ print(e)
return False
@@ -72,46 +72,52 @@ def reactions_unfollow(user_id: int, shout_id: int):
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
- session.query(ShoutReactionsFollower).where(and_(
- ShoutReactionsFollower.follower == user_id,
- ShoutReactionsFollower.shout == shout.id
- )).first()
+ session.query(ShoutReactionsFollower)
+ .where(
+ and_(
+ ShoutReactionsFollower.follower == user_id,
+ ShoutReactionsFollower.shout == shout.id,
+ )
+ )
+ .first()
)
if following:
session.delete(following)
session.commit()
return True
- except:
+ except Exception as e:
+ print(e)
pass
return False
def is_published_author(session, user_id):
- ''' checks if user has at least one publication '''
- return session.query(
- Shout
- ).where(
- Shout.authors.contains(user_id)
- ).filter(
- and_(
- Shout.publishedAt.is_not(None),
- Shout.deletedAt.is_(None)
- )
- ).count() > 0
+ """checks if user has at least one publication"""
+ return (
+ session.query(Shout)
+ .where(Shout.authors.contains(user_id))
+ .filter(and_(Shout.publishedAt.is_not(None), Shout.deletedAt.is_(None)))
+ .count()
+ > 0
+ )
def check_to_publish(session, user_id, reaction):
- ''' set shout to public if publicated approvers amount > 4 '''
+ """set shout to public if publicated approvers amount > 4"""
if not reaction.replyTo and reaction.kind in [
ReactionKind.ACCEPT,
ReactionKind.LIKE,
- ReactionKind.PROOF
+ ReactionKind.PROOF,
]:
if is_published_author(user_id):
# now count how many approvers are voted already
- approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
- approvers = [user_id, ]
+ approvers_reactions = (
+ session.query(Reaction).where(Reaction.shout == reaction.shout).all()
+ )
+ approvers = [
+ user_id,
+ ]
for ar in approvers_reactions:
a = ar.createdBy
if is_published_author(session, a):
@@ -122,21 +128,17 @@ def check_to_publish(session, user_id, reaction):
def check_to_hide(session, user_id, reaction):
- ''' hides any shout if 20% of reactions are negative '''
+ """hides any shout if 20% of reactions are negative"""
if not reaction.replyTo and reaction.kind in [
ReactionKind.REJECT,
ReactionKind.DISLIKE,
- ReactionKind.DISPROOF
+ ReactionKind.DISPROOF,
]:
# if is_published_author(user):
approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
rejects = 0
for r in approvers_reactions:
- if r.kind in [
- ReactionKind.REJECT,
- ReactionKind.DISLIKE,
- ReactionKind.DISPROOF
- ]:
+ if r.kind in [ReactionKind.REJECT, ReactionKind.DISLIKE, ReactionKind.DISPROOF]:
rejects += 1
if len(approvers_reactions) / rejects < 5:
return True
@@ -146,14 +148,14 @@ def check_to_hide(session, user_id, reaction):
def set_published(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
s.publishedAt = datetime.now(tz=timezone.utc)
- s.visibility = text('public')
+ s.visibility = text("public")
session.add(s)
session.commit()
def set_hidden(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
- s.visibility = text('community')
+ s.visibility = text("community")
session.add(s)
session.commit()
@@ -162,37 +164,46 @@ def set_hidden(session, shout_id):
@login_required
async def create_reaction(_, info, reaction):
auth: AuthCredentials = info.context["request"].auth
- reaction['createdBy'] = auth.user_id
+ reaction["createdBy"] = auth.user_id
rdict = {}
with local_session() as session:
shout = session.query(Shout).where(Shout.id == reaction["shout"]).one()
author = session.query(User).where(User.id == auth.user_id).one()
- if reaction["kind"] in [
- ReactionKind.DISLIKE.name,
- ReactionKind.LIKE.name
- ]:
- existing_reaction = session.query(Reaction).where(
- and_(
- Reaction.shout == reaction["shout"],
- Reaction.createdBy == auth.user_id,
- Reaction.kind == reaction["kind"],
- Reaction.replyTo == reaction.get("replyTo")
+ if reaction["kind"] in [ReactionKind.DISLIKE.name, ReactionKind.LIKE.name]:
+ existing_reaction = (
+ session.query(Reaction)
+ .where(
+ and_(
+ Reaction.shout == reaction["shout"],
+ Reaction.createdBy == auth.user_id,
+ Reaction.kind == reaction["kind"],
+ Reaction.replyTo == reaction.get("replyTo"),
+ )
)
- ).first()
+ .first()
+ )
if existing_reaction is not None:
raise OperationNotAllowed("You can't vote twice")
- opposite_reaction_kind = ReactionKind.DISLIKE if reaction["kind"] == ReactionKind.LIKE.name else ReactionKind.LIKE
- opposite_reaction = session.query(Reaction).where(
+ opposite_reaction_kind = (
+ ReactionKind.DISLIKE
+ if reaction["kind"] == ReactionKind.LIKE.name
+ else ReactionKind.LIKE
+ )
+ opposite_reaction = (
+ session.query(Reaction)
+ .where(
and_(
Reaction.shout == reaction["shout"],
Reaction.createdBy == auth.user_id,
Reaction.kind == opposite_reaction_kind,
- Reaction.replyTo == reaction.get("replyTo")
+ Reaction.replyTo == reaction.get("replyTo"),
)
- ).first()
+ )
+ .first()
+ )
if opposite_reaction is not None:
session.delete(opposite_reaction)
@@ -221,8 +232,8 @@ async def create_reaction(_, info, reaction):
await notification_service.handle_new_reaction(r.id)
rdict = r.dict()
- rdict['shout'] = shout.dict()
- rdict['createdBy'] = author.dict()
+ rdict["shout"] = shout.dict()
+ rdict["createdBy"] = author.dict()
# self-regulation mechanics
if check_to_hide(session, auth.user_id, r):
@@ -235,11 +246,7 @@ async def create_reaction(_, info, reaction):
except Exception as e:
print(f"[resolvers.reactions] error on reactions autofollowing: {e}")
- rdict['stat'] = {
- "commented": 0,
- "reacted": 0,
- "rating": 0
- }
+ rdict["stat"] = {"commented": 0, "reacted": 0, "rating": 0}
return {"reaction": rdict}
@@ -269,11 +276,7 @@ async def update_reaction(_, info, id, reaction={}):
if reaction.get("range"):
r.range = reaction.get("range")
session.commit()
- r.stat = {
- "commented": commented_stat,
- "reacted": reacted_stat,
- "rating": rating_stat
- }
+ r.stat = {"commented": commented_stat, "reacted": reacted_stat, "rating": rating_stat}
return {"reaction": r}
@@ -290,17 +293,12 @@ async def delete_reaction(_, info, id):
if r.createdBy != auth.user_id:
return {"error": "access denied"}
- if r.kind in [
- ReactionKind.LIKE,
- ReactionKind.DISLIKE
- ]:
+ if r.kind in [ReactionKind.LIKE, ReactionKind.DISLIKE]:
session.delete(r)
else:
r.deletedAt = datetime.now(tz=timezone.utc)
session.commit()
- return {
- "reaction": r
- }
+ return {"reaction": r}
@query.field("loadReactionsBy")
@@ -321,12 +319,10 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
:return: Reaction[]
"""
- q = select(
- Reaction, User, Shout
- ).join(
- User, Reaction.createdBy == User.id
- ).join(
- Shout, Reaction.shout == Shout.id
+ q = (
+ select(Reaction, User, Shout)
+ .join(User, Reaction.createdBy == User.id)
+ .join(Shout, Reaction.shout == Shout.id)
)
if by.get("shout"):
@@ -344,7 +340,7 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
if by.get("comment"):
q = q.filter(func.length(Reaction.body) > 0)
- if len(by.get('search', '')) > 2:
+ if len(by.get("search", "")) > 2:
q = q.filter(Reaction.body.ilike(f'%{by["body"]}%'))
if by.get("days"):
@@ -352,13 +348,9 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
q = q.filter(Reaction.createdAt > after)
order_way = asc if by.get("sort", "").startswith("-") else desc
- order_field = by.get("sort", "").replace('-', '') or Reaction.createdAt
+ order_field = by.get("sort", "").replace("-", "") or Reaction.createdAt
- q = q.group_by(
- Reaction.id, User.id, Shout.id
- ).order_by(
- order_way(order_field)
- )
+ q = q.group_by(Reaction.id, User.id, Shout.id).order_by(order_way(order_field))
q = add_reaction_stat_columns(q)
@@ -367,13 +359,15 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
reactions = []
with local_session() as session:
- for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute(q):
+ for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute(
+ q
+ ):
reaction.createdBy = user
reaction.shout = shout
reaction.stat = {
"rating": rating_stat,
"commented": commented_stat,
- "reacted": reacted_stat
+ "reacted": reacted_stat,
}
reaction.kind = reaction.kind.name
diff --git a/resolvers/zine/topics.py b/resolvers/zine/topics.py
index f354a7b4..ad4f59fc 100644
--- a/resolvers/zine/topics.py
+++ b/resolvers/zine/topics.py
@@ -1,24 +1,25 @@
-from sqlalchemy import and_, select, distinct, func
+from sqlalchemy import and_, distinct, func, select
from sqlalchemy.orm import aliased
from auth.authenticate import login_required
from base.orm import local_session
from base.resolvers import mutation, query
-from orm.shout import ShoutTopic, ShoutAuthor
-from orm.topic import Topic, TopicFollower
from orm import User
+from orm.shout import ShoutAuthor, ShoutTopic
+from orm.topic import Topic, TopicFollower
def add_topic_stat_columns(q):
aliased_shout_author = aliased(ShoutAuthor)
aliased_topic_follower = aliased(TopicFollower)
- q = q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic).add_columns(
- func.count(distinct(ShoutTopic.shout)).label('shouts_stat')
- ).outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout).add_columns(
- func.count(distinct(aliased_shout_author.user)).label('authors_stat')
- ).outerjoin(aliased_topic_follower).add_columns(
- func.count(distinct(aliased_topic_follower.follower)).label('followers_stat')
+ q = (
+ q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic)
+ .add_columns(func.count(distinct(ShoutTopic.shout)).label("shouts_stat"))
+ .outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout)
+ .add_columns(func.count(distinct(aliased_shout_author.user)).label("authors_stat"))
+ .outerjoin(aliased_topic_follower)
+ .add_columns(func.count(distinct(aliased_topic_follower.follower)).label("followers_stat"))
)
q = q.group_by(Topic.id)
@@ -28,11 +29,7 @@ def add_topic_stat_columns(q):
def add_stat(topic, stat_columns):
[shouts_stat, authors_stat, followers_stat] = stat_columns
- topic.stat = {
- "shouts": shouts_stat,
- "authors": authors_stat,
- "followers": followers_stat
- }
+ topic.stat = {"shouts": shouts_stat, "authors": authors_stat, "followers": followers_stat}
return topic
@@ -125,7 +122,8 @@ def topic_follow(user_id, slug):
session.add(following)
session.commit()
return True
- except:
+ except Exception as e:
+ print(e)
return False
@@ -133,18 +131,17 @@ def topic_unfollow(user_id, slug):
try:
with local_session() as session:
sub = (
- session.query(TopicFollower).join(Topic).filter(
- and_(
- TopicFollower.follower == user_id,
- Topic.slug == slug
- )
- ).first()
+ session.query(TopicFollower)
+ .join(Topic)
+ .filter(and_(TopicFollower.follower == user_id, Topic.slug == slug))
+ .first()
)
if sub:
session.delete(sub)
session.commit()
return True
- except:
+ except Exception as e:
+ print(e)
pass
return False
diff --git a/schema_types.py b/schema_types.py
new file mode 100644
index 00000000..99b3a68f
--- /dev/null
+++ b/schema_types.py
@@ -0,0 +1,1157 @@
+from enum import Enum
+from typing import Any, ClassVar, List, Optional, TypedDict
+
+## Scalars
+
+DateTime = Any
+
+MessageStatus = Enum("MessageStatus", "NEW UPDATED DELETED")
+
+
+ReactionStatus = Enum("ReactionStatus", "NEW UPDATED CHANGED EXPLAINED DELETED")
+
+
+FollowingEntity = Enum("FollowingEntity", "TOPIC AUTHOR COMMUNITY REACTIONS")
+
+
+ReactionKind = Enum(
+ "ReactionKind",
+ "LIKE DISLIKE AGREE DISAGREE PROOF DISPROOF COMMENT QUOTE PROPOSE ASK REMARK FOOTNOTE ACCEPT REJECT",
+)
+
+
+NotificationType = Enum("NotificationType", "NEW_COMMENT NEW_REPLY")
+
+
+AuthResult = TypedDict(
+ "AuthResult",
+ {
+ "error": Optional[str],
+ "token": Optional[str],
+ "user": Optional["User"],
+ },
+)
+
+
+ChatMember = TypedDict(
+ "ChatMember",
+ {
+ "id": int,
+ "slug": str,
+ "name": str,
+ "userpic": Optional[str],
+ "lastSeen": Optional["DateTime"],
+ "online": Optional[bool],
+ },
+)
+
+
+AuthorStat = TypedDict(
+ "AuthorStat",
+ {
+ "followings": Optional[int],
+ "followers": Optional[int],
+ "rating": Optional[int],
+ "commented": Optional[int],
+ "shouts": Optional[int],
+ },
+)
+
+
+Author = TypedDict(
+ "Author",
+ {
+ "id": int,
+ "slug": str,
+ "name": str,
+ "userpic": Optional[str],
+ "caption": Optional[str],
+ "bio": Optional[str],
+ "about": Optional[str],
+ "links": Optional[List[str]],
+ "stat": Optional["AuthorStat"],
+ "roles": Optional[List["Role"]],
+ "lastSeen": Optional["DateTime"],
+ "createdAt": Optional["DateTime"],
+ },
+)
+
+
+Result = TypedDict(
+ "Result",
+ {
+ "error": Optional[str],
+ "slugs": Optional[List[str]],
+ "chat": Optional["Chat"],
+ "chats": Optional[List["Chat"]],
+ "message": Optional["Message"],
+ "messages": Optional[List["Message"]],
+ "members": Optional[List["ChatMember"]],
+ "shout": Optional["Shout"],
+ "shouts": Optional[List["Shout"]],
+ "author": Optional["Author"],
+ "authors": Optional[List["Author"]],
+ "reaction": Optional["Reaction"],
+ "reactions": Optional[List["Reaction"]],
+ "topic": Optional["Topic"],
+ "topics": Optional[List["Topic"]],
+ "community": Optional["Community"],
+ "communities": Optional[List["Community"]],
+ },
+)
+
+
+ReactionUpdating = TypedDict(
+ "ReactionUpdating",
+ {
+ "error": Optional[str],
+ "status": Optional["ReactionStatus"],
+ "reaction": Optional["Reaction"],
+ },
+)
+
+
+Mutation = TypedDict(
+ "Mutation",
+ {
+ "createChat": "CreateChatMutationResult",
+ "updateChat": "UpdateChatMutationResult",
+ "deleteChat": "DeleteChatMutationResult",
+ "createMessage": "CreateMessageMutationResult",
+ "updateMessage": "UpdateMessageMutationResult",
+ "deleteMessage": "DeleteMessageMutationResult",
+ "markAsRead": "MarkAsReadMutationResult",
+ "getSession": "GetSessionMutationResult",
+ "registerUser": "RegisterUserMutationResult",
+ "sendLink": "SendLinkMutationResult",
+ "confirmEmail": "ConfirmEmailMutationResult",
+ "createShout": "CreateShoutMutationResult",
+ "updateShout": "UpdateShoutMutationResult",
+ "deleteShout": "DeleteShoutMutationResult",
+ "rateUser": "RateUserMutationResult",
+ "updateProfile": "UpdateProfileMutationResult",
+ "createTopic": "CreateTopicMutationResult",
+ "updateTopic": "UpdateTopicMutationResult",
+ "destroyTopic": "DestroyTopicMutationResult",
+ "createReaction": "CreateReactionMutationResult",
+ "updateReaction": "UpdateReactionMutationResult",
+ "deleteReaction": "DeleteReactionMutationResult",
+ "follow": "FollowMutationResult",
+ "unfollow": "UnfollowMutationResult",
+ "markNotificationAsRead": "MarkNotificationAsReadMutationResult",
+ "markAllNotificationsAsRead": "MarkAllNotificationsAsReadMutationResult",
+ },
+)
+
+
+CreateChatParams = TypedDict(
+ "CreateChatParams",
+ {
+ "title": Optional[str],
+ "members": List[int],
+ },
+)
+
+
+CreateChatMutationResult = ClassVar["Result"]
+
+
+UpdateChatParams = TypedDict(
+ "UpdateChatParams",
+ {
+ "chat": "ChatInput",
+ },
+)
+
+
+UpdateChatMutationResult = ClassVar["Result"]
+
+
+DeleteChatParams = TypedDict(
+ "DeleteChatParams",
+ {
+ "chatId": str,
+ },
+)
+
+
+DeleteChatMutationResult = ClassVar["Result"]
+
+
+CreateMessageParams = TypedDict(
+ "CreateMessageParams",
+ {
+ "chat": str,
+ "body": str,
+ "replyTo": Optional[int],
+ },
+)
+
+
+CreateMessageMutationResult = ClassVar["Result"]
+
+
+UpdateMessageParams = TypedDict(
+ "UpdateMessageParams",
+ {
+ "chatId": str,
+ "id": int,
+ "body": str,
+ },
+)
+
+
+UpdateMessageMutationResult = ClassVar["Result"]
+
+
+DeleteMessageParams = TypedDict(
+ "DeleteMessageParams",
+ {
+ "chatId": str,
+ "id": int,
+ },
+)
+
+
+DeleteMessageMutationResult = ClassVar["Result"]
+
+
+MarkAsReadParams = TypedDict(
+ "MarkAsReadParams",
+ {
+ "chatId": str,
+ "ids": List[int],
+ },
+)
+
+
+MarkAsReadMutationResult = ClassVar["Result"]
+
+
+GetSessionMutationResult = ClassVar["AuthResult"]
+
+
+RegisterUserParams = TypedDict(
+ "RegisterUserParams",
+ {
+ "email": str,
+ "password": Optional[str],
+ "name": Optional[str],
+ },
+)
+
+
+RegisterUserMutationResult = ClassVar["AuthResult"]
+
+
+SendLinkParams = TypedDict(
+ "SendLinkParams",
+ {
+ "email": str,
+ "lang": Optional[str],
+ "template": Optional[str],
+ },
+)
+
+
+SendLinkMutationResult = ClassVar["Result"]
+
+
+ConfirmEmailParams = TypedDict(
+ "ConfirmEmailParams",
+ {
+ "token": str,
+ },
+)
+
+
+ConfirmEmailMutationResult = ClassVar["AuthResult"]
+
+
+CreateShoutParams = TypedDict(
+ "CreateShoutParams",
+ {
+ "inp": "ShoutInput",
+ },
+)
+
+
+CreateShoutMutationResult = ClassVar["Result"]
+
+
+UpdateShoutParams = TypedDict(
+ "UpdateShoutParams",
+ {
+ "shout_id": int,
+ "shout_input": Optional["ShoutInput"],
+ "publish": Optional[bool],
+ },
+)
+
+
+UpdateShoutMutationResult = ClassVar["Result"]
+
+
+DeleteShoutParams = TypedDict(
+ "DeleteShoutParams",
+ {
+ "shout_id": int,
+ },
+)
+
+
+DeleteShoutMutationResult = ClassVar["Result"]
+
+
+RateUserParams = TypedDict(
+ "RateUserParams",
+ {
+ "slug": str,
+ "value": int,
+ },
+)
+
+
+RateUserMutationResult = ClassVar["Result"]
+
+
+UpdateProfileParams = TypedDict(
+ "UpdateProfileParams",
+ {
+ "profile": "ProfileInput",
+ },
+)
+
+
+UpdateProfileMutationResult = ClassVar["Result"]
+
+
+CreateTopicParams = TypedDict(
+ "CreateTopicParams",
+ {
+ "input": "TopicInput",
+ },
+)
+
+
+CreateTopicMutationResult = ClassVar["Result"]
+
+
+UpdateTopicParams = TypedDict(
+ "UpdateTopicParams",
+ {
+ "input": "TopicInput",
+ },
+)
+
+
+UpdateTopicMutationResult = ClassVar["Result"]
+
+
+DestroyTopicParams = TypedDict(
+ "DestroyTopicParams",
+ {
+ "slug": str,
+ },
+)
+
+
+DestroyTopicMutationResult = ClassVar["Result"]
+
+
+CreateReactionParams = TypedDict(
+ "CreateReactionParams",
+ {
+ "reaction": "ReactionInput",
+ },
+)
+
+
+CreateReactionMutationResult = ClassVar["Result"]
+
+
+UpdateReactionParams = TypedDict(
+ "UpdateReactionParams",
+ {
+ "id": int,
+ "reaction": "ReactionInput",
+ },
+)
+
+
+UpdateReactionMutationResult = ClassVar["Result"]
+
+
+DeleteReactionParams = TypedDict(
+ "DeleteReactionParams",
+ {
+ "id": int,
+ },
+)
+
+
+DeleteReactionMutationResult = ClassVar["Result"]
+
+
+FollowParams = TypedDict(
+ "FollowParams",
+ {
+ "what": "FollowingEntity",
+ "slug": str,
+ },
+)
+
+
+FollowMutationResult = ClassVar["Result"]
+
+
+UnfollowParams = TypedDict(
+ "UnfollowParams",
+ {
+ "what": "FollowingEntity",
+ "slug": str,
+ },
+)
+
+
+UnfollowMutationResult = ClassVar["Result"]
+
+
+MarkNotificationAsReadParams = TypedDict(
+ "MarkNotificationAsReadParams",
+ {
+ "notification_id": int,
+ },
+)
+
+
+MarkNotificationAsReadMutationResult = ClassVar["Result"]
+
+
+MarkAllNotificationsAsReadMutationResult = ClassVar["Result"]
+
+
+NotificationsQueryResult = TypedDict(
+ "NotificationsQueryResult",
+ {
+ "notifications": List["Notification"],
+ "totalCount": int,
+ "totalUnreadCount": int,
+ },
+)
+
+
+MySubscriptionsQueryResult = TypedDict(
+ "MySubscriptionsQueryResult",
+ {
+ "topics": List["Topic"],
+ "authors": List["Author"],
+ },
+)
+
+
+Query = TypedDict(
+ "Query",
+ {
+ "loadChats": "LoadChatsQueryResult",
+ "loadMessagesBy": "LoadMessagesByQueryResult",
+ "loadRecipients": "LoadRecipientsQueryResult",
+ "searchRecipients": "SearchRecipientsQueryResult",
+ "searchMessages": "SearchMessagesQueryResult",
+ "isEmailUsed": "IsEmailUsedQueryResult",
+ "signIn": "SignInQueryResult",
+ "signOut": "SignOutQueryResult",
+ "loadAuthorsBy": "LoadAuthorsByQueryResult",
+ "loadShout": "LoadShoutQueryResult",
+ "loadShouts": "LoadShoutsQueryResult",
+ "loadDrafts": "LoadDraftsQueryResult",
+ "loadReactionsBy": "LoadReactionsByQueryResult",
+ "userFollowers": "UserFollowersQueryResult",
+ "userFollowedAuthors": "UserFollowedAuthorsQueryResult",
+ "userFollowedTopics": "UserFollowedTopicsQueryResult",
+ "authorsAll": "AuthorsAllQueryResult",
+ "getAuthor": "GetAuthorQueryResult",
+ "myFeed": "MyFeedQueryResult",
+ "markdownBody": "MarkdownBodyQueryResult",
+ "getTopic": "GetTopicQueryResult",
+ "topicsAll": "TopicsAllQueryResult",
+ "topicsRandom": "TopicsRandomQueryResult",
+ "topicsByCommunity": "TopicsByCommunityQueryResult",
+ "topicsByAuthor": "TopicsByAuthorQueryResult",
+ "loadNotifications": "LoadNotificationsQueryResult",
+ "loadMySubscriptions": "LoadMySubscriptionsQueryResult",
+ },
+)
+
+
+LoadChatsParams = TypedDict(
+ "LoadChatsParams",
+ {
+ "limit": Optional[int],
+ "offset": Optional[int],
+ },
+)
+
+
+LoadChatsQueryResult = ClassVar["Result"]
+
+
+LoadMessagesByParams = TypedDict(
+ "LoadMessagesByParams",
+ {
+ "by": "MessagesBy",
+ "limit": Optional[int],
+ "offset": Optional[int],
+ },
+)
+
+
+LoadMessagesByQueryResult = ClassVar["Result"]
+
+
+LoadRecipientsParams = TypedDict(
+ "LoadRecipientsParams",
+ {
+ "limit": Optional[int],
+ "offset": Optional[int],
+ },
+)
+
+
+LoadRecipientsQueryResult = ClassVar["Result"]
+
+
+SearchRecipientsParams = TypedDict(
+ "SearchRecipientsParams",
+ {
+ "query": str,
+ "limit": Optional[int],
+ "offset": Optional[int],
+ },
+)
+
+
+SearchRecipientsQueryResult = ClassVar["Result"]
+
+
+SearchMessagesParams = TypedDict(
+ "SearchMessagesParams",
+ {
+ "by": "MessagesBy",
+ "limit": Optional[int],
+ "offset": Optional[int],
+ },
+)
+
+
+SearchMessagesQueryResult = ClassVar["Result"]
+
+
+IsEmailUsedParams = TypedDict(
+ "IsEmailUsedParams",
+ {
+ "email": str,
+ },
+)
+
+
+IsEmailUsedQueryResult = bool
+
+
+SignInParams = TypedDict(
+ "SignInParams",
+ {
+ "email": str,
+ "password": Optional[str],
+ "lang": Optional[str],
+ },
+)
+
+
+SignInQueryResult = ClassVar["AuthResult"]
+
+
+SignOutQueryResult = ClassVar["AuthResult"]
+
+
+LoadAuthorsByParams = TypedDict(
+ "LoadAuthorsByParams",
+ {
+ "by": Optional["AuthorsBy"],
+ "limit": Optional[int],
+ "offset": Optional[int],
+ },
+)
+
+
+LoadAuthorsByQueryResult = ClassVar[List["Author"]]
+
+
+LoadShoutParams = TypedDict(
+ "LoadShoutParams",
+ {
+ "slug": Optional[str],
+ "shout_id": Optional[int],
+ },
+)
+
+
+LoadShoutQueryResult = ClassVar[Optional["Shout"]]
+
+
+LoadShoutsParams = TypedDict(
+ "LoadShoutsParams",
+ {
+ "options": Optional["LoadShoutsOptions"],
+ },
+)
+
+
+LoadShoutsQueryResult = ClassVar[List["Shout"]]
+
+
+LoadDraftsQueryResult = ClassVar[List["Shout"]]
+
+
+LoadReactionsByParams = TypedDict(
+ "LoadReactionsByParams",
+ {
+ "by": "ReactionBy",
+ "limit": Optional[int],
+ "offset": Optional[int],
+ },
+)
+
+
+LoadReactionsByQueryResult = ClassVar[List["Reaction"]]
+
+
+UserFollowersParams = TypedDict(
+ "UserFollowersParams",
+ {
+ "slug": str,
+ },
+)
+
+
+UserFollowersQueryResult = ClassVar[List["Author"]]
+
+
+UserFollowedAuthorsParams = TypedDict(
+ "UserFollowedAuthorsParams",
+ {
+ "slug": str,
+ },
+)
+
+
+UserFollowedAuthorsQueryResult = ClassVar[List["Author"]]
+
+
+UserFollowedTopicsParams = TypedDict(
+ "UserFollowedTopicsParams",
+ {
+ "slug": str,
+ },
+)
+
+
+UserFollowedTopicsQueryResult = ClassVar[List["Topic"]]
+
+
+AuthorsAllQueryResult = ClassVar[List["Author"]]
+
+
+GetAuthorParams = TypedDict(
+ "GetAuthorParams",
+ {
+ "slug": str,
+ },
+)
+
+
+GetAuthorQueryResult = ClassVar[Optional["Author"]]
+
+
+MyFeedParams = TypedDict(
+ "MyFeedParams",
+ {
+ "options": Optional["LoadShoutsOptions"],
+ },
+)
+
+
+MyFeedQueryResult = ClassVar[Optional[List["Shout"]]]
+
+
+MarkdownBodyParams = TypedDict(
+ "MarkdownBodyParams",
+ {
+ "body": str,
+ },
+)
+
+
+MarkdownBodyQueryResult = str
+
+
+GetTopicParams = TypedDict(
+ "GetTopicParams",
+ {
+ "slug": str,
+ },
+)
+
+
+GetTopicQueryResult = ClassVar[Optional["Topic"]]
+
+
+TopicsAllQueryResult = ClassVar[List["Topic"]]
+
+
+TopicsRandomParams = TypedDict(
+ "TopicsRandomParams",
+ {
+ "amount": Optional[int],
+ },
+)
+
+
+TopicsRandomQueryResult = ClassVar[List["Topic"]]
+
+
+TopicsByCommunityParams = TypedDict(
+ "TopicsByCommunityParams",
+ {
+ "community": str,
+ },
+)
+
+
+TopicsByCommunityQueryResult = ClassVar[List["Topic"]]
+
+
+TopicsByAuthorParams = TypedDict(
+ "TopicsByAuthorParams",
+ {
+ "author": str,
+ },
+)
+
+
+TopicsByAuthorQueryResult = ClassVar[List["Topic"]]
+
+
+LoadNotificationsParams = TypedDict(
+ "LoadNotificationsParams",
+ {
+ "params": "NotificationsQueryParams",
+ },
+)
+
+
+LoadNotificationsQueryResult = ClassVar["NotificationsQueryResult"]
+
+
+LoadMySubscriptionsQueryResult = ClassVar[Optional["MySubscriptionsQueryResult"]]
+
+
+Resource = TypedDict(
+ "Resource",
+ {
+ "id": int,
+ "name": str,
+ },
+)
+
+
+Operation = TypedDict(
+ "Operation",
+ {
+ "id": int,
+ "name": str,
+ },
+)
+
+
+Permission = TypedDict(
+ "Permission",
+ {
+ "operation": int,
+ "resource": int,
+ },
+)
+
+
+Role = TypedDict(
+ "Role",
+ {
+ "id": int,
+ "name": str,
+ "community": str,
+ "desc": Optional[str],
+ "permissions": List["Permission"],
+ },
+)
+
+
+Rating = TypedDict(
+ "Rating",
+ {
+ "rater": str,
+ "value": int,
+ },
+)
+
+
+User = TypedDict(
+ "User",
+ {
+ "id": int,
+ "username": str,
+ "createdAt": "DateTime",
+ "lastSeen": Optional["DateTime"],
+ "slug": str,
+ "name": Optional[str],
+ "email": Optional[str],
+ "password": Optional[str],
+ "oauth": Optional[str],
+ "userpic": Optional[str],
+ "links": Optional[List[str]],
+ "emailConfirmed": Optional[bool],
+ "muted": Optional[bool],
+ "updatedAt": Optional["DateTime"],
+ "ratings": Optional[List["Rating"]],
+ "bio": Optional[str],
+ "about": Optional[str],
+ "communities": Optional[List[int]],
+ "oid": Optional[str],
+ },
+)
+
+
+Reaction = TypedDict(
+ "Reaction",
+ {
+ "id": int,
+ "shout": "Shout",
+ "createdAt": "DateTime",
+ "createdBy": "User",
+ "updatedAt": Optional["DateTime"],
+ "deletedAt": Optional["DateTime"],
+ "deletedBy": Optional["User"],
+ "range": Optional[str],
+ "kind": "ReactionKind",
+ "body": Optional[str],
+ "replyTo": Optional[int],
+ "stat": Optional["Stat"],
+ "old_id": Optional[str],
+ "old_thread": Optional[str],
+ },
+)
+
+
+Shout = TypedDict(
+ "Shout",
+ {
+ "id": int,
+ "slug": str,
+ "body": str,
+ "lead": Optional[str],
+ "description": Optional[str],
+ "createdAt": "DateTime",
+ "topics": Optional[List["Topic"]],
+ "mainTopic": Optional[str],
+ "title": Optional[str],
+ "subtitle": Optional[str],
+ "authors": Optional[List["Author"]],
+ "lang": Optional[str],
+ "community": Optional[str],
+ "cover": Optional[str],
+ "layout": Optional[str],
+ "versionOf": Optional[str],
+ "visibility": Optional[str],
+ "updatedAt": Optional["DateTime"],
+ "updatedBy": Optional["User"],
+ "deletedAt": Optional["DateTime"],
+ "deletedBy": Optional["User"],
+ "publishedAt": Optional["DateTime"],
+ "media": Optional[str],
+ "stat": Optional["Stat"],
+ },
+)
+
+
+Stat = TypedDict(
+ "Stat",
+ {
+ "viewed": Optional[int],
+ "reacted": Optional[int],
+ "rating": Optional[int],
+ "commented": Optional[int],
+ "ranking": Optional[int],
+ },
+)
+
+
+Community = TypedDict(
+ "Community",
+ {
+ "id": int,
+ "slug": str,
+ "name": str,
+ "desc": Optional[str],
+ "pic": str,
+ "createdAt": "DateTime",
+ "createdBy": "User",
+ },
+)
+
+
+Collection = TypedDict(
+ "Collection",
+ {
+ "id": int,
+ "slug": str,
+ "title": str,
+ "desc": Optional[str],
+ "amount": Optional[int],
+ "publishedAt": Optional["DateTime"],
+ "createdAt": "DateTime",
+ "createdBy": "User",
+ },
+)
+
+
+TopicStat = TypedDict(
+ "TopicStat",
+ {
+ "shouts": int,
+ "followers": int,
+ "authors": int,
+ },
+)
+
+
+Topic = TypedDict(
+ "Topic",
+ {
+ "id": int,
+ "slug": str,
+ "title": Optional[str],
+ "body": Optional[str],
+ "pic": Optional[str],
+ "stat": Optional["TopicStat"],
+ "oid": Optional[str],
+ },
+)
+
+
+Token = TypedDict(
+ "Token",
+ {
+ "createdAt": "DateTime",
+ "expiresAt": Optional["DateTime"],
+ "id": int,
+ "ownerId": int,
+ "usedAt": Optional["DateTime"],
+ "value": str,
+ },
+)
+
+
+Message = TypedDict(
+ "Message",
+ {
+ "author": int,
+ "chatId": str,
+ "body": str,
+ "createdAt": int,
+ "id": int,
+ "replyTo": Optional[int],
+ "updatedAt": Optional[int],
+ "seen": Optional[bool],
+ },
+)
+
+
+Chat = TypedDict(
+ "Chat",
+ {
+ "id": str,
+ "createdAt": int,
+ "createdBy": int,
+ "updatedAt": int,
+ "title": Optional[str],
+ "description": Optional[str],
+ "users": Optional[List[int]],
+ "members": Optional[List["ChatMember"]],
+ "admins": Optional[List[int]],
+ "messages": Optional[List["Message"]],
+ "unread": Optional[int],
+ "private": Optional[bool],
+ },
+)
+
+
+Notification = TypedDict(
+ "Notification",
+ {
+ "id": int,
+ "shout": Optional[int],
+ "reaction": Optional[int],
+ "type": "NotificationType",
+ "createdAt": "DateTime",
+ "seen": bool,
+ "data": Optional[str],
+ "occurrences": int,
+ },
+)
+
+
+ShoutInput = TypedDict(
+ "ShoutInput",
+ {
+ "slug": Optional[str],
+ "title": Optional[str],
+ "body": Optional[str],
+ "lead": Optional[str],
+ "description": Optional[str],
+ "layout": Optional[str],
+ "media": Optional[str],
+ "authors": Optional[List[str]],
+ "topics": Optional[List["TopicInput"]],
+ "community": Optional[int],
+ "mainTopic": Optional["TopicInput"],
+ "subtitle": Optional[str],
+ "cover": Optional[str],
+ },
+)
+
+
+ProfileInput = TypedDict(
+ "ProfileInput",
+ {
+ "slug": Optional[str],
+ "name": Optional[str],
+ "userpic": Optional[str],
+ "links": Optional[List[str]],
+ "bio": Optional[str],
+ "about": Optional[str],
+ },
+)
+
+
+TopicInput = TypedDict(
+ "TopicInput",
+ {
+ "id": Optional[int],
+ "slug": str,
+ "title": Optional[str],
+ "body": Optional[str],
+ "pic": Optional[str],
+ },
+)
+
+
+ReactionInput = TypedDict(
+ "ReactionInput",
+ {
+ "kind": "ReactionKind",
+ "shout": int,
+ "range": Optional[str],
+ "body": Optional[str],
+ "replyTo": Optional[int],
+ },
+)
+
+
+ChatInput = TypedDict(
+ "ChatInput",
+ {
+ "id": str,
+ "title": Optional[str],
+ "description": Optional[str],
+ },
+)
+
+
+MessagesBy = TypedDict(
+ "MessagesBy",
+ {
+ "author": Optional[str],
+ "body": Optional[str],
+ "chat": Optional[str],
+ "order": Optional[str],
+ "days": Optional[int],
+ "stat": Optional[str],
+ },
+)
+
+
+AuthorsBy = TypedDict(
+ "AuthorsBy",
+ {
+ "lastSeen": Optional["DateTime"],
+ "createdAt": Optional["DateTime"],
+ "slug": Optional[str],
+ "name": Optional[str],
+ "topic": Optional[str],
+ "order": Optional[str],
+ "days": Optional[int],
+ "stat": Optional[str],
+ },
+)
+
+
+LoadShoutsFilters = TypedDict(
+ "LoadShoutsFilters",
+ {
+ "title": Optional[str],
+ "body": Optional[str],
+ "topic": Optional[str],
+ "author": Optional[str],
+ "layout": Optional[str],
+ "excludeLayout": Optional[str],
+ "visibility": Optional[str],
+ "days": Optional[int],
+ "reacted": Optional[bool],
+ },
+)
+
+
+LoadShoutsOptions = TypedDict(
+ "LoadShoutsOptions",
+ {
+ "filters": Optional["LoadShoutsFilters"],
+ "with_author_captions": Optional[bool],
+ "limit": int,
+ "offset": Optional[int],
+ "order_by": Optional[str],
+ "order_by_desc": Optional[bool],
+ },
+)
+
+
+ReactionBy = TypedDict(
+ "ReactionBy",
+ {
+ "shout": Optional[str],
+ "shouts": Optional[List[str]],
+ "search": Optional[str],
+ "comment": Optional[bool],
+ "topic": Optional[str],
+ "createdBy": Optional[str],
+ "days": Optional[int],
+ "sort": Optional[str],
+ },
+)
+
+
+NotificationsQueryParams = TypedDict(
+ "NotificationsQueryParams",
+ {
+ "limit": Optional[int],
+ "offset": Optional[int],
+ },
+)
diff --git a/server.py b/server.py
index 753c60ae..db7157dd 100644
--- a/server.py
+++ b/server.py
@@ -1,8 +1,9 @@
-import sys
import os
+import sys
+
import uvicorn
-from settings import PORT, DEV_SERVER_PID_FILE_NAME
+from settings import DEV_SERVER_PID_FILE_NAME, PORT
def exception_handler(exception_type, exception, traceback, debug_hook=sys.excepthook):
@@ -10,47 +11,36 @@ def exception_handler(exception_type, exception, traceback, debug_hook=sys.excep
log_settings = {
- 'version': 1,
- 'disable_existing_loggers': True,
- 'formatters': {
- 'default': {
- '()': 'uvicorn.logging.DefaultFormatter',
- 'fmt': '%(levelprefix)s %(message)s',
- 'use_colors': None
+ "version": 1,
+ "disable_existing_loggers": True,
+ "formatters": {
+ "default": {
+ "()": "uvicorn.logging.DefaultFormatter",
+ "fmt": "%(levelprefix)s %(message)s",
+ "use_colors": None,
+ },
+ "access": {
+ "()": "uvicorn.logging.AccessFormatter",
+ "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
},
- 'access': {
- '()': 'uvicorn.logging.AccessFormatter',
- 'fmt': '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
- }
},
- 'handlers': {
- 'default': {
- 'formatter': 'default',
- 'class': 'logging.StreamHandler',
- 'stream': 'ext://sys.stderr'
+ "handlers": {
+ "default": {
+ "formatter": "default",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stderr",
+ },
+ "access": {
+ "formatter": "access",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stdout",
},
- 'access': {
- 'formatter': 'access',
- 'class': 'logging.StreamHandler',
- 'stream': 'ext://sys.stdout'
- }
},
- 'loggers': {
- 'uvicorn': {
- 'handlers': ['default'],
- 'level': 'INFO'
- },
- 'uvicorn.error': {
- 'level': 'INFO',
- 'handlers': ['default'],
- 'propagate': True
- },
- 'uvicorn.access': {
- 'handlers': ['access'],
- 'level': 'INFO',
- 'propagate': False
- }
- }
+ "loggers": {
+ "uvicorn": {"handlers": ["default"], "level": "INFO"},
+ "uvicorn.error": {"level": "INFO", "handlers": ["default"], "propagate": True},
+ "uvicorn.access": {"handlers": ["access"], "level": "INFO", "propagate": False},
+ },
}
local_headers = [
@@ -58,7 +48,8 @@ local_headers = [
("Access-Control-Allow-Origin", "https://localhost:3000"),
(
"Access-Control-Allow-Headers",
- "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization",
+ "DNT,User-Agent,X-Requested-With,If-Modified-Since,"
+ + " Cache-Control,Content-Type,Range,Authorization",
),
("Access-Control-Expose-Headers", "Content-Length,Content-Range"),
("Access-Control-Allow-Credentials", "true"),
@@ -86,24 +77,20 @@ if __name__ == "__main__":
# log_config=log_settings,
log_level=None,
access_log=True,
- reload=want_reload
+ reload=want_reload,
) # , ssl_keyfile="discours.key", ssl_certfile="discours.crt")
elif x == "migrate":
from migration import process
+
print("MODE: MIGRATE")
process()
elif x == "bson":
from migration.bson2json import json_tables
+
print("MODE: BSON")
json_tables()
else:
sys.excepthook = exception_handler
- uvicorn.run(
- "main:app",
- host="0.0.0.0",
- port=PORT,
- proxy_headers=True,
- server_header=True
- )
+ uvicorn.run("main:app", host="0.0.0.0", port=PORT, proxy_headers=True, server_header=True)
diff --git a/services/following.py b/services/following.py
index 8410eb2d..a2be6af4 100644
--- a/services/following.py
+++ b/services/following.py
@@ -18,12 +18,7 @@ class Following:
class FollowingManager:
lock = asyncio.Lock()
- data = {
- 'author': [],
- 'topic': [],
- 'shout': [],
- 'chat': []
- }
+ data = {"author": [], "topic": [], "shout": [], "chat": []}
@staticmethod
async def register(kind, uid):
@@ -39,13 +34,13 @@ class FollowingManager:
async def push(kind, payload):
try:
async with FollowingManager.lock:
- if kind == 'chat':
- for chat in FollowingManager['chat']:
+ if kind == "chat":
+ for chat in FollowingManager["chat"]:
if payload.message["chatId"] == chat.uid:
chat.queue.put_nowait(payload)
else:
for entity in FollowingManager[kind]:
- if payload.shout['createdBy'] == entity.uid:
+ if payload.shout["createdBy"] == entity.uid:
entity.queue.put_nowait(payload)
except Exception as e:
print(Exception(e))
diff --git a/services/main.py b/services/main.py
index 10301b86..6397a5e5 100644
--- a/services/main.py
+++ b/services/main.py
@@ -1,13 +1,13 @@
+from base.orm import local_session
from services.search import SearchService
from services.stat.viewed import ViewedStorage
-from base.orm import local_session
async def storages_init():
with local_session() as session:
- print('[main] initialize SearchService')
+ print("[main] initialize SearchService")
await SearchService.init(session)
- print('[main] SearchService initialized')
- print('[main] initialize storages')
+ print("[main] SearchService initialized")
+ print("[main] initialize storages")
await ViewedStorage.init()
- print('[main] storages initialized')
+ print("[main] storages initialized")
diff --git a/services/notifications/notification_service.py b/services/notifications/notification_service.py
index 7e92aa95..e1109cff 100644
--- a/services/notifications/notification_service.py
+++ b/services/notifications/notification_service.py
@@ -5,32 +5,24 @@ from datetime import datetime, timezone
from sqlalchemy import and_
from base.orm import local_session
-from orm import Reaction, Shout, Notification, User
+from orm import Notification, Reaction, Shout, User
from orm.notification import NotificationType
from orm.reaction import ReactionKind
from services.notifications.sse import connection_manager
def shout_to_shout_data(shout):
- return {
- "title": shout.title,
- "slug": shout.slug
- }
+ return {"title": shout.title, "slug": shout.slug}
def user_to_user_data(user):
- return {
- "id": user.id,
- "name": user.name,
- "slug": user.slug,
- "userpic": user.userpic
- }
+ return {"id": user.id, "name": user.name, "slug": user.slug, "userpic": user.userpic}
def update_prev_notification(notification, user, reaction):
notification_data = json.loads(notification.data)
- notification_data["users"] = [u for u in notification_data["users"] if u['id'] != user.id]
+ notification_data["users"] = [u for u in notification_data["users"] if u["id"] != user.id]
notification_data["users"].append(user_to_user_data(user))
if notification_data["reactionIds"] is None:
@@ -57,34 +49,45 @@ class NewReactionNotificator:
if reaction.kind == ReactionKind.COMMENT:
parent_reaction = None
if reaction.replyTo:
- parent_reaction = session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
+ parent_reaction = (
+ session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
+ )
if parent_reaction.createdBy != reaction.createdBy:
- prev_new_reply_notification = session.query(Notification).where(
- and_(
- Notification.user == shout.createdBy,
- Notification.type == NotificationType.NEW_REPLY,
- Notification.shout == shout.id,
- Notification.reaction == parent_reaction.id,
- Notification.seen == False
+ prev_new_reply_notification = (
+ session.query(Notification)
+ .where(
+ and_(
+ Notification.user == shout.createdBy,
+ Notification.type == NotificationType.NEW_REPLY,
+ Notification.shout == shout.id,
+ Notification.reaction == parent_reaction.id,
+ Notification.seen == False, # noqa: E712
+ )
)
- ).first()
+ .first()
+ )
if prev_new_reply_notification:
update_prev_notification(prev_new_reply_notification, user, reaction)
else:
- reply_notification_data = json.dumps({
- "shout": shout_to_shout_data(shout),
- "users": [user_to_user_data(user)],
- "reactionIds": [reaction.id]
- }, ensure_ascii=False)
+ reply_notification_data = json.dumps(
+ {
+ "shout": shout_to_shout_data(shout),
+ "users": [user_to_user_data(user)],
+ "reactionIds": [reaction.id],
+ },
+ ensure_ascii=False,
+ )
- reply_notification = Notification.create(**{
- "user": parent_reaction.createdBy,
- "type": NotificationType.NEW_REPLY,
- "shout": shout.id,
- "reaction": parent_reaction.id,
- "data": reply_notification_data
- })
+ reply_notification = Notification.create(
+ **{
+ "user": parent_reaction.createdBy,
+ "type": NotificationType.NEW_REPLY,
+ "shout": shout.id,
+ "reaction": parent_reaction.id,
+ "data": reply_notification_data,
+ }
+ )
session.add(reply_notification)
@@ -93,30 +96,39 @@ class NewReactionNotificator:
if reaction.createdBy != shout.createdBy and (
parent_reaction is None or parent_reaction.createdBy != shout.createdBy
):
- prev_new_comment_notification = session.query(Notification).where(
- and_(
- Notification.user == shout.createdBy,
- Notification.type == NotificationType.NEW_COMMENT,
- Notification.shout == shout.id,
- Notification.seen == False
+ prev_new_comment_notification = (
+ session.query(Notification)
+ .where(
+ and_(
+ Notification.user == shout.createdBy,
+ Notification.type == NotificationType.NEW_COMMENT,
+ Notification.shout == shout.id,
+ Notification.seen == False, # noqa: E712
+ )
)
- ).first()
+ .first()
+ )
if prev_new_comment_notification:
update_prev_notification(prev_new_comment_notification, user, reaction)
else:
- notification_data_string = json.dumps({
- "shout": shout_to_shout_data(shout),
- "users": [user_to_user_data(user)],
- "reactionIds": [reaction.id]
- }, ensure_ascii=False)
+ notification_data_string = json.dumps(
+ {
+ "shout": shout_to_shout_data(shout),
+ "users": [user_to_user_data(user)],
+ "reactionIds": [reaction.id],
+ },
+ ensure_ascii=False,
+ )
- author_notification = Notification.create(**{
- "user": shout.createdBy,
- "type": NotificationType.NEW_COMMENT,
- "shout": shout.id,
- "data": notification_data_string
- })
+ author_notification = Notification.create(
+ **{
+ "user": shout.createdBy,
+ "type": NotificationType.NEW_COMMENT,
+ "shout": shout.id,
+ "data": notification_data_string,
+ }
+ )
session.add(author_notification)
@@ -142,7 +154,7 @@ class NotificationService:
try:
await notificator.run()
except Exception as e:
- print(f'[NotificationService.worker] error: {str(e)}')
+ print(f"[NotificationService.worker] error: {str(e)}")
notification_service = NotificationService()
diff --git a/services/notifications/sse.py b/services/notifications/sse.py
index 085dbde0..55cae575 100644
--- a/services/notifications/sse.py
+++ b/services/notifications/sse.py
@@ -1,8 +1,8 @@
+import asyncio
import json
from sse_starlette.sse import EventSourceResponse
from starlette.requests import Request
-import asyncio
class ConnectionManager:
@@ -28,9 +28,7 @@ class ConnectionManager:
return
for connection in self.connections_by_user_id[user_id]:
- data = {
- "type": "newNotifications"
- }
+ data = {"type": "newNotifications"}
data_string = json.dumps(data, ensure_ascii=False)
await connection.put(data_string)
diff --git a/services/search.py b/services/search.py
index 834e5bf7..610dd775 100644
--- a/services/search.py
+++ b/services/search.py
@@ -1,5 +1,7 @@
import asyncio
import json
+from typing import List
+
from base.redis import redis
from orm.shout import Shout
from resolvers.zine.load import load_shouts_by
@@ -7,25 +9,20 @@ from resolvers.zine.load import load_shouts_by
class SearchService:
lock = asyncio.Lock()
- cache = {}
+ # cache = {}
@staticmethod
async def init(session):
async with SearchService.lock:
- print('[search.service] did nothing')
- SearchService.cache = {}
+ print("[search.service] did nothing")
+ # SearchService.cache = {}
@staticmethod
- async def search(text, limit, offset) -> [Shout]:
+ async def search(text, limit, offset) -> List[Shout]:
cached = await redis.execute("GET", text)
if not cached:
async with SearchService.lock:
- options = {
- "title": text,
- "body": text,
- "limit": limit,
- "offset": offset
- }
+ options = {"title": text, "body": text, "limit": limit, "offset": offset}
payload = await load_shouts_by(None, None, options)
await redis.execute("SET", text, json.dumps(payload))
return payload
diff --git a/services/stat/viewed.py b/services/stat/viewed.py
index 905ade43..213440d9 100644
--- a/services/stat/viewed.py
+++ b/services/stat/viewed.py
@@ -1,18 +1,18 @@
import asyncio
import time
-from datetime import timedelta, timezone, datetime
+from datetime import datetime, timedelta, timezone
from os import environ, path
from ssl import create_default_context
from gql import Client, gql
from gql.transport.aiohttp import AIOHTTPTransport
-from sqlalchemy import func
from base.orm import local_session
-from orm import User, Topic
-from orm.shout import ShoutTopic, Shout
+from orm import Topic
+from orm.shout import Shout, ShoutTopic
-load_facts = gql("""
+load_facts = gql(
+ """
query getDomains {
domains {
id
@@ -25,9 +25,11 @@ query getDomains {
}
}
}
-""")
+"""
+)
-load_pages = gql("""
+load_pages = gql(
+ """
query getDomains {
domains {
title
@@ -41,8 +43,9 @@ query getDomains {
}
}
}
-""")
-schema_str = open(path.dirname(__file__) + '/ackee.graphql').read()
+"""
+)
+schema_str = open(path.dirname(__file__) + "/ackee.graphql").read()
token = environ.get("ACKEE_TOKEN", "")
@@ -50,10 +53,8 @@ def create_client(headers=None, schema=None):
return Client(
schema=schema,
transport=AIOHTTPTransport(
- url="https://ackee.discours.io/api",
- ssl=create_default_context(),
- headers=headers
- )
+ url="https://ackee.discours.io/api", ssl=create_default_context(), headers=headers
+ ),
)
@@ -71,13 +72,13 @@ class ViewedStorage:
@staticmethod
async def init():
- """ graphql client connection using permanent token """
+ """graphql client connection using permanent token"""
self = ViewedStorage
async with self.lock:
if token:
- self.client = create_client({
- "Authorization": "Bearer %s" % str(token)
- }, schema=schema_str)
+ self.client = create_client(
+ {"Authorization": "Bearer %s" % str(token)}, schema=schema_str
+ )
print("[stat.viewed] * authorized permanentely by ackee.discours.io: %s" % token)
else:
print("[stat.viewed] * please set ACKEE_TOKEN")
@@ -85,7 +86,7 @@ class ViewedStorage:
@staticmethod
async def update_pages():
- """ query all the pages from ackee sorted by views count """
+ """query all the pages from ackee sorted by views count"""
print("[stat.viewed] ⎧ updating ackee pages data ---")
start = time.time()
self = ViewedStorage
@@ -96,7 +97,7 @@ class ViewedStorage:
try:
for page in self.pages:
p = page["value"].split("?")[0]
- slug = p.split('discours.io/')[-1]
+ slug = p.split("discours.io/")[-1]
shouts[slug] = page["count"]
for slug in shouts.keys():
await ViewedStorage.increment(slug, shouts[slug])
@@ -118,7 +119,7 @@ class ViewedStorage:
# unused yet
@staticmethod
async def get_shout(shout_slug):
- """ getting shout views metric by slug """
+ """getting shout views metric by slug"""
self = ViewedStorage
async with self.lock:
shout_views = self.by_shouts.get(shout_slug)
@@ -136,7 +137,7 @@ class ViewedStorage:
@staticmethod
async def get_topic(topic_slug):
- """ getting topic views value summed """
+ """getting topic views value summed"""
self = ViewedStorage
topic_views = 0
async with self.lock:
@@ -146,24 +147,28 @@ class ViewedStorage:
@staticmethod
def update_topics(session, shout_slug):
- """ updates topics counters by shout slug """
+ """updates topics counters by shout slug"""
self = ViewedStorage
- for [shout_topic, topic] in session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(
- Shout.slug == shout_slug
- ).all():
+ for [shout_topic, topic] in (
+ session.query(ShoutTopic, Topic)
+ .join(Topic)
+ .join(Shout)
+ .where(Shout.slug == shout_slug)
+ .all()
+ ):
if not self.by_topics.get(topic.slug):
self.by_topics[topic.slug] = {}
self.by_topics[topic.slug][shout_slug] = self.by_shouts[shout_slug]
@staticmethod
- async def increment(shout_slug, amount=1, viewer='ackee'):
- """ the only way to change views counter """
+ async def increment(shout_slug, amount=1, viewer="ackee"):
+ """the only way to change views counter"""
self = ViewedStorage
async with self.lock:
+ # TODO optimize, currently we execute 1 DB transaction per shout
with local_session() as session:
shout = session.query(Shout).where(Shout.slug == shout_slug).one()
- if viewer == 'old-discours':
+ if viewer == "old-discours":
# this is needed for old db migration
if shout.viewsOld == amount:
print(f"viewsOld amount: {amount}")
@@ -185,7 +190,7 @@ class ViewedStorage:
@staticmethod
async def worker():
- """ async task worker """
+ """async task worker"""
failed = 0
self = ViewedStorage
if self.disabled:
@@ -205,9 +210,10 @@ class ViewedStorage:
if failed == 0:
when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
t = format(when.astimezone().isoformat())
- print("[stat.viewed] ⎩ next update: %s" % (
- t.split("T")[0] + " " + t.split("T")[1].split(".")[0]
- ))
+ print(
+ "[stat.viewed] ⎩ next update: %s"
+ % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
+ )
await asyncio.sleep(self.period)
else:
await asyncio.sleep(10)
diff --git a/settings.py b/settings.py
index 270b4551..f3da9952 100644
--- a/settings.py
+++ b/settings.py
@@ -3,8 +3,9 @@ from os import environ
PORT = 8080
DB_URL = (
- environ.get("DATABASE_URL") or environ.get("DB_URL") or
- "postgresql://postgres@localhost:5432/discoursio"
+ environ.get("DATABASE_URL")
+ or environ.get("DB_URL")
+ or "postgresql://postgres@localhost:5432/discoursio"
)
JWT_ALGORITHM = "HS256"
JWT_SECRET_KEY = environ.get("JWT_SECRET_KEY") or "8f1bd7696ffb482d8486dfbc6e7d16dd-secret-key"
@@ -30,4 +31,4 @@ SENTRY_DSN = environ.get("SENTRY_DSN")
SESSION_SECRET_KEY = environ.get("SESSION_SECRET_KEY") or "!secret"
# for local development
-DEV_SERVER_PID_FILE_NAME = 'dev-server.pid'
+DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
diff --git a/setup.cfg b/setup.cfg
index 588918a1..dde3b963 100755
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,23 +1,13 @@
[isort]
# https://github.com/PyCQA/isort
-line_length = 120
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
-force_alphabetical_sort = false
-
-[tool:brunette]
-# https://github.com/odwyersoftware/brunette
-line-length = 120
-single-quotes = false
+profile = black
[flake8]
# https://github.com/PyCQA/flake8
-exclude = .git,__pycache__,.mypy_cache,.vercel
-max-line-length = 120
-max-complexity = 15
-select = B,C,E,F,W,T4,B9
+exclude = .git,.mypy_cache,schema_types.py
+max-line-length = 100
+max-complexity = 10
+# select = B,C,E,F,W,T4,B9
# E203: Whitespace before ':'
# E266: Too many leading '#' for block comment
# E501: Line too long (82 > 79 characters)
@@ -25,15 +15,12 @@ select = B,C,E,F,W,T4,B9
# W503: Line break occurred before a binary operator
# F403: 'from module import *' used; unable to detect undefined names
# C901: Function is too complex
-ignore = E203,E266,E501,E722,W503,F403,C901
+# ignore = E203,E266,E501,E722,W503,F403,C901
+extend-ignore = E203
[mypy]
# https://github.com/python/mypy
-ignore_missing_imports = true
-warn_return_any = false
-warn_unused_configs = true
-disallow_untyped_calls = true
-disallow_untyped_defs = true
-disallow_incomplete_defs = true
-[mypy-api.*]
-ignore_errors = true
+exclude = schema_types.py
+explicit_package_bases = true
+check_untyped_defs = true
+plugins = sqlmypy
diff --git a/validations/auth.py b/validations/auth.py
index 216d7dcb..73b83079 100644
--- a/validations/auth.py
+++ b/validations/auth.py
@@ -1,4 +1,5 @@
from typing import Optional, Text
+
from pydantic import BaseModel
diff --git a/validations/inbox.py b/validations/inbox.py
index d03cca05..cf90da6f 100644
--- a/validations/inbox.py
+++ b/validations/inbox.py
@@ -1,4 +1,5 @@
-from typing import Optional, Text, List
+from typing import List, Optional, Text
+
from pydantic import BaseModel
@@ -20,6 +21,7 @@ class Member(BaseModel):
class Chat(BaseModel):
+ id: int
createdAt: int
createdBy: int
users: List[int]
From 4395e3a72d4e9ca97f31e263e20de14d065208b2 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Mon, 30 Oct 2023 22:09:04 +0100
Subject: [PATCH 10/27] build fix
---
.github/workflows/checks.yml | 2 +-
lint.sh => checks.sh | 0
2 files changed, 1 insertion(+), 1 deletion(-)
rename lint.sh => checks.sh (100%)
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index c80dd279..479ad80b 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -13,4 +13,4 @@ jobs:
- run: pip install --upgrade pip
- run: pip install -r requirements.txt
- run: pip install -r requirements-dev.txt
- - run: check.sh
+ - run: checks.sh
diff --git a/lint.sh b/checks.sh
similarity index 100%
rename from lint.sh
rename to checks.sh
From 756a80151ac3f586fe7a707f199b7bc0dcb4757d Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Mon, 30 Oct 2023 22:32:04 +0100
Subject: [PATCH 11/27] build fix
---
.github/workflows/checks.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index 479ad80b..8120f2ae 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -9,7 +9,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
- python-version: 3.10
+ python-version: 3.10.6
- run: pip install --upgrade pip
- run: pip install -r requirements.txt
- run: pip install -r requirements-dev.txt
From eaca3d613dfd7679667197c7f229bf4e9092d508 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Mon, 30 Oct 2023 22:34:59 +0100
Subject: [PATCH 12/27] build fix
---
.github/workflows/checks.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index 8120f2ae..794ec49c 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -13,4 +13,4 @@ jobs:
- run: pip install --upgrade pip
- run: pip install -r requirements.txt
- run: pip install -r requirements-dev.txt
- - run: checks.sh
+ - run: ./checks.sh
From 34d04e42400a66569e7f6b629ca9deb28c41d71c Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Tue, 31 Oct 2023 14:52:58 +0100
Subject: [PATCH 13/27] my feed query fixed
---
README.md | 23 +++++++++++++----------
resolvers/zine/load.py | 17 +++++++++++------
2 files changed, 24 insertions(+), 16 deletions(-)
diff --git a/README.md b/README.md
index 7081fbca..12e902db 100644
--- a/README.md
+++ b/README.md
@@ -7,10 +7,6 @@
- starlette
- uvicorn
-# Local development
-
-Install deps first
-
on osx
```
brew install redis nginx postgres
@@ -22,16 +18,23 @@ on debian/ubuntu
apt install redis nginx
```
-First, install Postgres. Then you'll need some data, so migrate it:
+# Local development
+
+Install deps first
+
```
-createdb discoursio
-python server.py migrate
+pip install -r requirements.txt
+pip install -r requirements-dev.txt
+pre-commit install
```
-Then run nginx, redis and API server
+Create database from backup
+```
+./restdb.sh
+```
+
+Start local server
```
-redis-server
-pip install -r requirements.txt
python3 server.py dev
```
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 95fac914..186fe347 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -210,12 +210,17 @@ async def get_my_feed(_, info, options):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
+ user_followed_authors = select(AuthorFollower.author).where(AuthorFollower.follower == user_id)
+ user_followed_topics = select(TopicFollower.topic).where(TopicFollower.follower == user_id)
+
subquery = (
select(Shout.id)
- .join(ShoutAuthor)
- .join(AuthorFollower, AuthorFollower.follower == user_id)
- .join(ShoutTopic)
- .join(TopicFollower, TopicFollower.follower == user_id)
+ .where(Shout.id == ShoutAuthor.shout)
+ .where(Shout.id == ShoutTopic.shout)
+ .where(
+ (ShoutAuthor.user.in_(user_followed_authors))
+ | (ShoutTopic.topic.in_(user_followed_topics))
+ )
)
q = (
@@ -240,9 +245,10 @@ async def get_my_feed(_, info, options):
q = q.group_by(Shout.id).order_by(nulls_last(query_order_by)).limit(limit).offset(offset)
+ # print(q.compile(compile_kwargs={"literal_binds": True}))
+
shouts = []
with local_session() as session:
- shouts_map = {}
for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
q
).unique():
@@ -253,6 +259,5 @@ async def get_my_feed(_, info, options):
"commented": commented_stat,
"rating": rating_stat,
}
- shouts_map[shout.id] = shout
return shouts
From 21316187e0c0d0d154ed3f7338768925e07fd5d6 Mon Sep 17 00:00:00 2001
From: Ilya Y <75578537+ilya-bkv@users.noreply.github.com>
Date: Tue, 31 Oct 2023 18:48:00 +0300
Subject: [PATCH 14/27] Fix/deploy fix (#103)
* deploy fix
---------
Co-authored-by: Igor Lobanov
---
CHECKS | 2 +-
base/orm.py | 2 +-
schema_types.py | 1157 -----------------------------------------------
3 files changed, 2 insertions(+), 1159 deletions(-)
delete mode 100644 schema_types.py
diff --git a/CHECKS b/CHECKS
index 738277fe..bdfcc6fe 100644
--- a/CHECKS
+++ b/CHECKS
@@ -1,5 +1,5 @@
WAIT=10
TIMEOUT=10
-ATTEMPTS=10
+ATTEMPTS=3
/
diff --git a/base/orm.py b/base/orm.py
index 6f1e2ce4..2f4ce11c 100644
--- a/base/orm.py
+++ b/base/orm.py
@@ -30,7 +30,7 @@ class Base(DeclarativeBase):
__abstract__ = True
__table_args__ = {"extend_existing": True}
- id: Column | None = Column(Integer, primary_key=True)
+ id = Column(Integer, primary_key=True)
def __init_subclass__(cls, **kwargs):
REGISTRY[cls.__name__] = cls
diff --git a/schema_types.py b/schema_types.py
deleted file mode 100644
index 99b3a68f..00000000
--- a/schema_types.py
+++ /dev/null
@@ -1,1157 +0,0 @@
-from enum import Enum
-from typing import Any, ClassVar, List, Optional, TypedDict
-
-## Scalars
-
-DateTime = Any
-
-MessageStatus = Enum("MessageStatus", "NEW UPDATED DELETED")
-
-
-ReactionStatus = Enum("ReactionStatus", "NEW UPDATED CHANGED EXPLAINED DELETED")
-
-
-FollowingEntity = Enum("FollowingEntity", "TOPIC AUTHOR COMMUNITY REACTIONS")
-
-
-ReactionKind = Enum(
- "ReactionKind",
- "LIKE DISLIKE AGREE DISAGREE PROOF DISPROOF COMMENT QUOTE PROPOSE ASK REMARK FOOTNOTE ACCEPT REJECT",
-)
-
-
-NotificationType = Enum("NotificationType", "NEW_COMMENT NEW_REPLY")
-
-
-AuthResult = TypedDict(
- "AuthResult",
- {
- "error": Optional[str],
- "token": Optional[str],
- "user": Optional["User"],
- },
-)
-
-
-ChatMember = TypedDict(
- "ChatMember",
- {
- "id": int,
- "slug": str,
- "name": str,
- "userpic": Optional[str],
- "lastSeen": Optional["DateTime"],
- "online": Optional[bool],
- },
-)
-
-
-AuthorStat = TypedDict(
- "AuthorStat",
- {
- "followings": Optional[int],
- "followers": Optional[int],
- "rating": Optional[int],
- "commented": Optional[int],
- "shouts": Optional[int],
- },
-)
-
-
-Author = TypedDict(
- "Author",
- {
- "id": int,
- "slug": str,
- "name": str,
- "userpic": Optional[str],
- "caption": Optional[str],
- "bio": Optional[str],
- "about": Optional[str],
- "links": Optional[List[str]],
- "stat": Optional["AuthorStat"],
- "roles": Optional[List["Role"]],
- "lastSeen": Optional["DateTime"],
- "createdAt": Optional["DateTime"],
- },
-)
-
-
-Result = TypedDict(
- "Result",
- {
- "error": Optional[str],
- "slugs": Optional[List[str]],
- "chat": Optional["Chat"],
- "chats": Optional[List["Chat"]],
- "message": Optional["Message"],
- "messages": Optional[List["Message"]],
- "members": Optional[List["ChatMember"]],
- "shout": Optional["Shout"],
- "shouts": Optional[List["Shout"]],
- "author": Optional["Author"],
- "authors": Optional[List["Author"]],
- "reaction": Optional["Reaction"],
- "reactions": Optional[List["Reaction"]],
- "topic": Optional["Topic"],
- "topics": Optional[List["Topic"]],
- "community": Optional["Community"],
- "communities": Optional[List["Community"]],
- },
-)
-
-
-ReactionUpdating = TypedDict(
- "ReactionUpdating",
- {
- "error": Optional[str],
- "status": Optional["ReactionStatus"],
- "reaction": Optional["Reaction"],
- },
-)
-
-
-Mutation = TypedDict(
- "Mutation",
- {
- "createChat": "CreateChatMutationResult",
- "updateChat": "UpdateChatMutationResult",
- "deleteChat": "DeleteChatMutationResult",
- "createMessage": "CreateMessageMutationResult",
- "updateMessage": "UpdateMessageMutationResult",
- "deleteMessage": "DeleteMessageMutationResult",
- "markAsRead": "MarkAsReadMutationResult",
- "getSession": "GetSessionMutationResult",
- "registerUser": "RegisterUserMutationResult",
- "sendLink": "SendLinkMutationResult",
- "confirmEmail": "ConfirmEmailMutationResult",
- "createShout": "CreateShoutMutationResult",
- "updateShout": "UpdateShoutMutationResult",
- "deleteShout": "DeleteShoutMutationResult",
- "rateUser": "RateUserMutationResult",
- "updateProfile": "UpdateProfileMutationResult",
- "createTopic": "CreateTopicMutationResult",
- "updateTopic": "UpdateTopicMutationResult",
- "destroyTopic": "DestroyTopicMutationResult",
- "createReaction": "CreateReactionMutationResult",
- "updateReaction": "UpdateReactionMutationResult",
- "deleteReaction": "DeleteReactionMutationResult",
- "follow": "FollowMutationResult",
- "unfollow": "UnfollowMutationResult",
- "markNotificationAsRead": "MarkNotificationAsReadMutationResult",
- "markAllNotificationsAsRead": "MarkAllNotificationsAsReadMutationResult",
- },
-)
-
-
-CreateChatParams = TypedDict(
- "CreateChatParams",
- {
- "title": Optional[str],
- "members": List[int],
- },
-)
-
-
-CreateChatMutationResult = ClassVar["Result"]
-
-
-UpdateChatParams = TypedDict(
- "UpdateChatParams",
- {
- "chat": "ChatInput",
- },
-)
-
-
-UpdateChatMutationResult = ClassVar["Result"]
-
-
-DeleteChatParams = TypedDict(
- "DeleteChatParams",
- {
- "chatId": str,
- },
-)
-
-
-DeleteChatMutationResult = ClassVar["Result"]
-
-
-CreateMessageParams = TypedDict(
- "CreateMessageParams",
- {
- "chat": str,
- "body": str,
- "replyTo": Optional[int],
- },
-)
-
-
-CreateMessageMutationResult = ClassVar["Result"]
-
-
-UpdateMessageParams = TypedDict(
- "UpdateMessageParams",
- {
- "chatId": str,
- "id": int,
- "body": str,
- },
-)
-
-
-UpdateMessageMutationResult = ClassVar["Result"]
-
-
-DeleteMessageParams = TypedDict(
- "DeleteMessageParams",
- {
- "chatId": str,
- "id": int,
- },
-)
-
-
-DeleteMessageMutationResult = ClassVar["Result"]
-
-
-MarkAsReadParams = TypedDict(
- "MarkAsReadParams",
- {
- "chatId": str,
- "ids": List[int],
- },
-)
-
-
-MarkAsReadMutationResult = ClassVar["Result"]
-
-
-GetSessionMutationResult = ClassVar["AuthResult"]
-
-
-RegisterUserParams = TypedDict(
- "RegisterUserParams",
- {
- "email": str,
- "password": Optional[str],
- "name": Optional[str],
- },
-)
-
-
-RegisterUserMutationResult = ClassVar["AuthResult"]
-
-
-SendLinkParams = TypedDict(
- "SendLinkParams",
- {
- "email": str,
- "lang": Optional[str],
- "template": Optional[str],
- },
-)
-
-
-SendLinkMutationResult = ClassVar["Result"]
-
-
-ConfirmEmailParams = TypedDict(
- "ConfirmEmailParams",
- {
- "token": str,
- },
-)
-
-
-ConfirmEmailMutationResult = ClassVar["AuthResult"]
-
-
-CreateShoutParams = TypedDict(
- "CreateShoutParams",
- {
- "inp": "ShoutInput",
- },
-)
-
-
-CreateShoutMutationResult = ClassVar["Result"]
-
-
-UpdateShoutParams = TypedDict(
- "UpdateShoutParams",
- {
- "shout_id": int,
- "shout_input": Optional["ShoutInput"],
- "publish": Optional[bool],
- },
-)
-
-
-UpdateShoutMutationResult = ClassVar["Result"]
-
-
-DeleteShoutParams = TypedDict(
- "DeleteShoutParams",
- {
- "shout_id": int,
- },
-)
-
-
-DeleteShoutMutationResult = ClassVar["Result"]
-
-
-RateUserParams = TypedDict(
- "RateUserParams",
- {
- "slug": str,
- "value": int,
- },
-)
-
-
-RateUserMutationResult = ClassVar["Result"]
-
-
-UpdateProfileParams = TypedDict(
- "UpdateProfileParams",
- {
- "profile": "ProfileInput",
- },
-)
-
-
-UpdateProfileMutationResult = ClassVar["Result"]
-
-
-CreateTopicParams = TypedDict(
- "CreateTopicParams",
- {
- "input": "TopicInput",
- },
-)
-
-
-CreateTopicMutationResult = ClassVar["Result"]
-
-
-UpdateTopicParams = TypedDict(
- "UpdateTopicParams",
- {
- "input": "TopicInput",
- },
-)
-
-
-UpdateTopicMutationResult = ClassVar["Result"]
-
-
-DestroyTopicParams = TypedDict(
- "DestroyTopicParams",
- {
- "slug": str,
- },
-)
-
-
-DestroyTopicMutationResult = ClassVar["Result"]
-
-
-CreateReactionParams = TypedDict(
- "CreateReactionParams",
- {
- "reaction": "ReactionInput",
- },
-)
-
-
-CreateReactionMutationResult = ClassVar["Result"]
-
-
-UpdateReactionParams = TypedDict(
- "UpdateReactionParams",
- {
- "id": int,
- "reaction": "ReactionInput",
- },
-)
-
-
-UpdateReactionMutationResult = ClassVar["Result"]
-
-
-DeleteReactionParams = TypedDict(
- "DeleteReactionParams",
- {
- "id": int,
- },
-)
-
-
-DeleteReactionMutationResult = ClassVar["Result"]
-
-
-FollowParams = TypedDict(
- "FollowParams",
- {
- "what": "FollowingEntity",
- "slug": str,
- },
-)
-
-
-FollowMutationResult = ClassVar["Result"]
-
-
-UnfollowParams = TypedDict(
- "UnfollowParams",
- {
- "what": "FollowingEntity",
- "slug": str,
- },
-)
-
-
-UnfollowMutationResult = ClassVar["Result"]
-
-
-MarkNotificationAsReadParams = TypedDict(
- "MarkNotificationAsReadParams",
- {
- "notification_id": int,
- },
-)
-
-
-MarkNotificationAsReadMutationResult = ClassVar["Result"]
-
-
-MarkAllNotificationsAsReadMutationResult = ClassVar["Result"]
-
-
-NotificationsQueryResult = TypedDict(
- "NotificationsQueryResult",
- {
- "notifications": List["Notification"],
- "totalCount": int,
- "totalUnreadCount": int,
- },
-)
-
-
-MySubscriptionsQueryResult = TypedDict(
- "MySubscriptionsQueryResult",
- {
- "topics": List["Topic"],
- "authors": List["Author"],
- },
-)
-
-
-Query = TypedDict(
- "Query",
- {
- "loadChats": "LoadChatsQueryResult",
- "loadMessagesBy": "LoadMessagesByQueryResult",
- "loadRecipients": "LoadRecipientsQueryResult",
- "searchRecipients": "SearchRecipientsQueryResult",
- "searchMessages": "SearchMessagesQueryResult",
- "isEmailUsed": "IsEmailUsedQueryResult",
- "signIn": "SignInQueryResult",
- "signOut": "SignOutQueryResult",
- "loadAuthorsBy": "LoadAuthorsByQueryResult",
- "loadShout": "LoadShoutQueryResult",
- "loadShouts": "LoadShoutsQueryResult",
- "loadDrafts": "LoadDraftsQueryResult",
- "loadReactionsBy": "LoadReactionsByQueryResult",
- "userFollowers": "UserFollowersQueryResult",
- "userFollowedAuthors": "UserFollowedAuthorsQueryResult",
- "userFollowedTopics": "UserFollowedTopicsQueryResult",
- "authorsAll": "AuthorsAllQueryResult",
- "getAuthor": "GetAuthorQueryResult",
- "myFeed": "MyFeedQueryResult",
- "markdownBody": "MarkdownBodyQueryResult",
- "getTopic": "GetTopicQueryResult",
- "topicsAll": "TopicsAllQueryResult",
- "topicsRandom": "TopicsRandomQueryResult",
- "topicsByCommunity": "TopicsByCommunityQueryResult",
- "topicsByAuthor": "TopicsByAuthorQueryResult",
- "loadNotifications": "LoadNotificationsQueryResult",
- "loadMySubscriptions": "LoadMySubscriptionsQueryResult",
- },
-)
-
-
-LoadChatsParams = TypedDict(
- "LoadChatsParams",
- {
- "limit": Optional[int],
- "offset": Optional[int],
- },
-)
-
-
-LoadChatsQueryResult = ClassVar["Result"]
-
-
-LoadMessagesByParams = TypedDict(
- "LoadMessagesByParams",
- {
- "by": "MessagesBy",
- "limit": Optional[int],
- "offset": Optional[int],
- },
-)
-
-
-LoadMessagesByQueryResult = ClassVar["Result"]
-
-
-LoadRecipientsParams = TypedDict(
- "LoadRecipientsParams",
- {
- "limit": Optional[int],
- "offset": Optional[int],
- },
-)
-
-
-LoadRecipientsQueryResult = ClassVar["Result"]
-
-
-SearchRecipientsParams = TypedDict(
- "SearchRecipientsParams",
- {
- "query": str,
- "limit": Optional[int],
- "offset": Optional[int],
- },
-)
-
-
-SearchRecipientsQueryResult = ClassVar["Result"]
-
-
-SearchMessagesParams = TypedDict(
- "SearchMessagesParams",
- {
- "by": "MessagesBy",
- "limit": Optional[int],
- "offset": Optional[int],
- },
-)
-
-
-SearchMessagesQueryResult = ClassVar["Result"]
-
-
-IsEmailUsedParams = TypedDict(
- "IsEmailUsedParams",
- {
- "email": str,
- },
-)
-
-
-IsEmailUsedQueryResult = bool
-
-
-SignInParams = TypedDict(
- "SignInParams",
- {
- "email": str,
- "password": Optional[str],
- "lang": Optional[str],
- },
-)
-
-
-SignInQueryResult = ClassVar["AuthResult"]
-
-
-SignOutQueryResult = ClassVar["AuthResult"]
-
-
-LoadAuthorsByParams = TypedDict(
- "LoadAuthorsByParams",
- {
- "by": Optional["AuthorsBy"],
- "limit": Optional[int],
- "offset": Optional[int],
- },
-)
-
-
-LoadAuthorsByQueryResult = ClassVar[List["Author"]]
-
-
-LoadShoutParams = TypedDict(
- "LoadShoutParams",
- {
- "slug": Optional[str],
- "shout_id": Optional[int],
- },
-)
-
-
-LoadShoutQueryResult = ClassVar[Optional["Shout"]]
-
-
-LoadShoutsParams = TypedDict(
- "LoadShoutsParams",
- {
- "options": Optional["LoadShoutsOptions"],
- },
-)
-
-
-LoadShoutsQueryResult = ClassVar[List["Shout"]]
-
-
-LoadDraftsQueryResult = ClassVar[List["Shout"]]
-
-
-LoadReactionsByParams = TypedDict(
- "LoadReactionsByParams",
- {
- "by": "ReactionBy",
- "limit": Optional[int],
- "offset": Optional[int],
- },
-)
-
-
-LoadReactionsByQueryResult = ClassVar[List["Reaction"]]
-
-
-UserFollowersParams = TypedDict(
- "UserFollowersParams",
- {
- "slug": str,
- },
-)
-
-
-UserFollowersQueryResult = ClassVar[List["Author"]]
-
-
-UserFollowedAuthorsParams = TypedDict(
- "UserFollowedAuthorsParams",
- {
- "slug": str,
- },
-)
-
-
-UserFollowedAuthorsQueryResult = ClassVar[List["Author"]]
-
-
-UserFollowedTopicsParams = TypedDict(
- "UserFollowedTopicsParams",
- {
- "slug": str,
- },
-)
-
-
-UserFollowedTopicsQueryResult = ClassVar[List["Topic"]]
-
-
-AuthorsAllQueryResult = ClassVar[List["Author"]]
-
-
-GetAuthorParams = TypedDict(
- "GetAuthorParams",
- {
- "slug": str,
- },
-)
-
-
-GetAuthorQueryResult = ClassVar[Optional["Author"]]
-
-
-MyFeedParams = TypedDict(
- "MyFeedParams",
- {
- "options": Optional["LoadShoutsOptions"],
- },
-)
-
-
-MyFeedQueryResult = ClassVar[Optional[List["Shout"]]]
-
-
-MarkdownBodyParams = TypedDict(
- "MarkdownBodyParams",
- {
- "body": str,
- },
-)
-
-
-MarkdownBodyQueryResult = str
-
-
-GetTopicParams = TypedDict(
- "GetTopicParams",
- {
- "slug": str,
- },
-)
-
-
-GetTopicQueryResult = ClassVar[Optional["Topic"]]
-
-
-TopicsAllQueryResult = ClassVar[List["Topic"]]
-
-
-TopicsRandomParams = TypedDict(
- "TopicsRandomParams",
- {
- "amount": Optional[int],
- },
-)
-
-
-TopicsRandomQueryResult = ClassVar[List["Topic"]]
-
-
-TopicsByCommunityParams = TypedDict(
- "TopicsByCommunityParams",
- {
- "community": str,
- },
-)
-
-
-TopicsByCommunityQueryResult = ClassVar[List["Topic"]]
-
-
-TopicsByAuthorParams = TypedDict(
- "TopicsByAuthorParams",
- {
- "author": str,
- },
-)
-
-
-TopicsByAuthorQueryResult = ClassVar[List["Topic"]]
-
-
-LoadNotificationsParams = TypedDict(
- "LoadNotificationsParams",
- {
- "params": "NotificationsQueryParams",
- },
-)
-
-
-LoadNotificationsQueryResult = ClassVar["NotificationsQueryResult"]
-
-
-LoadMySubscriptionsQueryResult = ClassVar[Optional["MySubscriptionsQueryResult"]]
-
-
-Resource = TypedDict(
- "Resource",
- {
- "id": int,
- "name": str,
- },
-)
-
-
-Operation = TypedDict(
- "Operation",
- {
- "id": int,
- "name": str,
- },
-)
-
-
-Permission = TypedDict(
- "Permission",
- {
- "operation": int,
- "resource": int,
- },
-)
-
-
-Role = TypedDict(
- "Role",
- {
- "id": int,
- "name": str,
- "community": str,
- "desc": Optional[str],
- "permissions": List["Permission"],
- },
-)
-
-
-Rating = TypedDict(
- "Rating",
- {
- "rater": str,
- "value": int,
- },
-)
-
-
-User = TypedDict(
- "User",
- {
- "id": int,
- "username": str,
- "createdAt": "DateTime",
- "lastSeen": Optional["DateTime"],
- "slug": str,
- "name": Optional[str],
- "email": Optional[str],
- "password": Optional[str],
- "oauth": Optional[str],
- "userpic": Optional[str],
- "links": Optional[List[str]],
- "emailConfirmed": Optional[bool],
- "muted": Optional[bool],
- "updatedAt": Optional["DateTime"],
- "ratings": Optional[List["Rating"]],
- "bio": Optional[str],
- "about": Optional[str],
- "communities": Optional[List[int]],
- "oid": Optional[str],
- },
-)
-
-
-Reaction = TypedDict(
- "Reaction",
- {
- "id": int,
- "shout": "Shout",
- "createdAt": "DateTime",
- "createdBy": "User",
- "updatedAt": Optional["DateTime"],
- "deletedAt": Optional["DateTime"],
- "deletedBy": Optional["User"],
- "range": Optional[str],
- "kind": "ReactionKind",
- "body": Optional[str],
- "replyTo": Optional[int],
- "stat": Optional["Stat"],
- "old_id": Optional[str],
- "old_thread": Optional[str],
- },
-)
-
-
-Shout = TypedDict(
- "Shout",
- {
- "id": int,
- "slug": str,
- "body": str,
- "lead": Optional[str],
- "description": Optional[str],
- "createdAt": "DateTime",
- "topics": Optional[List["Topic"]],
- "mainTopic": Optional[str],
- "title": Optional[str],
- "subtitle": Optional[str],
- "authors": Optional[List["Author"]],
- "lang": Optional[str],
- "community": Optional[str],
- "cover": Optional[str],
- "layout": Optional[str],
- "versionOf": Optional[str],
- "visibility": Optional[str],
- "updatedAt": Optional["DateTime"],
- "updatedBy": Optional["User"],
- "deletedAt": Optional["DateTime"],
- "deletedBy": Optional["User"],
- "publishedAt": Optional["DateTime"],
- "media": Optional[str],
- "stat": Optional["Stat"],
- },
-)
-
-
-Stat = TypedDict(
- "Stat",
- {
- "viewed": Optional[int],
- "reacted": Optional[int],
- "rating": Optional[int],
- "commented": Optional[int],
- "ranking": Optional[int],
- },
-)
-
-
-Community = TypedDict(
- "Community",
- {
- "id": int,
- "slug": str,
- "name": str,
- "desc": Optional[str],
- "pic": str,
- "createdAt": "DateTime",
- "createdBy": "User",
- },
-)
-
-
-Collection = TypedDict(
- "Collection",
- {
- "id": int,
- "slug": str,
- "title": str,
- "desc": Optional[str],
- "amount": Optional[int],
- "publishedAt": Optional["DateTime"],
- "createdAt": "DateTime",
- "createdBy": "User",
- },
-)
-
-
-TopicStat = TypedDict(
- "TopicStat",
- {
- "shouts": int,
- "followers": int,
- "authors": int,
- },
-)
-
-
-Topic = TypedDict(
- "Topic",
- {
- "id": int,
- "slug": str,
- "title": Optional[str],
- "body": Optional[str],
- "pic": Optional[str],
- "stat": Optional["TopicStat"],
- "oid": Optional[str],
- },
-)
-
-
-Token = TypedDict(
- "Token",
- {
- "createdAt": "DateTime",
- "expiresAt": Optional["DateTime"],
- "id": int,
- "ownerId": int,
- "usedAt": Optional["DateTime"],
- "value": str,
- },
-)
-
-
-Message = TypedDict(
- "Message",
- {
- "author": int,
- "chatId": str,
- "body": str,
- "createdAt": int,
- "id": int,
- "replyTo": Optional[int],
- "updatedAt": Optional[int],
- "seen": Optional[bool],
- },
-)
-
-
-Chat = TypedDict(
- "Chat",
- {
- "id": str,
- "createdAt": int,
- "createdBy": int,
- "updatedAt": int,
- "title": Optional[str],
- "description": Optional[str],
- "users": Optional[List[int]],
- "members": Optional[List["ChatMember"]],
- "admins": Optional[List[int]],
- "messages": Optional[List["Message"]],
- "unread": Optional[int],
- "private": Optional[bool],
- },
-)
-
-
-Notification = TypedDict(
- "Notification",
- {
- "id": int,
- "shout": Optional[int],
- "reaction": Optional[int],
- "type": "NotificationType",
- "createdAt": "DateTime",
- "seen": bool,
- "data": Optional[str],
- "occurrences": int,
- },
-)
-
-
-ShoutInput = TypedDict(
- "ShoutInput",
- {
- "slug": Optional[str],
- "title": Optional[str],
- "body": Optional[str],
- "lead": Optional[str],
- "description": Optional[str],
- "layout": Optional[str],
- "media": Optional[str],
- "authors": Optional[List[str]],
- "topics": Optional[List["TopicInput"]],
- "community": Optional[int],
- "mainTopic": Optional["TopicInput"],
- "subtitle": Optional[str],
- "cover": Optional[str],
- },
-)
-
-
-ProfileInput = TypedDict(
- "ProfileInput",
- {
- "slug": Optional[str],
- "name": Optional[str],
- "userpic": Optional[str],
- "links": Optional[List[str]],
- "bio": Optional[str],
- "about": Optional[str],
- },
-)
-
-
-TopicInput = TypedDict(
- "TopicInput",
- {
- "id": Optional[int],
- "slug": str,
- "title": Optional[str],
- "body": Optional[str],
- "pic": Optional[str],
- },
-)
-
-
-ReactionInput = TypedDict(
- "ReactionInput",
- {
- "kind": "ReactionKind",
- "shout": int,
- "range": Optional[str],
- "body": Optional[str],
- "replyTo": Optional[int],
- },
-)
-
-
-ChatInput = TypedDict(
- "ChatInput",
- {
- "id": str,
- "title": Optional[str],
- "description": Optional[str],
- },
-)
-
-
-MessagesBy = TypedDict(
- "MessagesBy",
- {
- "author": Optional[str],
- "body": Optional[str],
- "chat": Optional[str],
- "order": Optional[str],
- "days": Optional[int],
- "stat": Optional[str],
- },
-)
-
-
-AuthorsBy = TypedDict(
- "AuthorsBy",
- {
- "lastSeen": Optional["DateTime"],
- "createdAt": Optional["DateTime"],
- "slug": Optional[str],
- "name": Optional[str],
- "topic": Optional[str],
- "order": Optional[str],
- "days": Optional[int],
- "stat": Optional[str],
- },
-)
-
-
-LoadShoutsFilters = TypedDict(
- "LoadShoutsFilters",
- {
- "title": Optional[str],
- "body": Optional[str],
- "topic": Optional[str],
- "author": Optional[str],
- "layout": Optional[str],
- "excludeLayout": Optional[str],
- "visibility": Optional[str],
- "days": Optional[int],
- "reacted": Optional[bool],
- },
-)
-
-
-LoadShoutsOptions = TypedDict(
- "LoadShoutsOptions",
- {
- "filters": Optional["LoadShoutsFilters"],
- "with_author_captions": Optional[bool],
- "limit": int,
- "offset": Optional[int],
- "order_by": Optional[str],
- "order_by_desc": Optional[bool],
- },
-)
-
-
-ReactionBy = TypedDict(
- "ReactionBy",
- {
- "shout": Optional[str],
- "shouts": Optional[List[str]],
- "search": Optional[str],
- "comment": Optional[bool],
- "topic": Optional[str],
- "createdBy": Optional[str],
- "days": Optional[int],
- "sort": Optional[str],
- },
-)
-
-
-NotificationsQueryParams = TypedDict(
- "NotificationsQueryParams",
- {
- "limit": Optional[int],
- "offset": Optional[int],
- },
-)
From 0da4e110c1cbe45c13ffb102bcf990782b6e5caa Mon Sep 17 00:00:00 2001
From: Ilya Y <75578537+ilya-bkv@users.noreply.github.com>
Date: Sat, 4 Nov 2023 19:44:58 +0300
Subject: [PATCH 15/27] test article (#104)
Co-authored-by: Igor Lobanov
---
resolvers/zine/load.py | 10 ++++++++++
services/notifications/notification_service.py | 2 +-
test/test.json | 6 +++---
3 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 186fe347..8defe383 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -1,3 +1,4 @@
+import json
from datetime import datetime, timedelta, timezone
from sqlalchemy.orm import aliased, joinedload
@@ -79,6 +80,15 @@ def apply_filters(q, filters, user_id=None): # noqa: C901
@query.field("loadShout")
async def load_shout(_, info, slug=None, shout_id=None):
+ # for testing, soon will be removed
+ if slug == "testtesttest":
+ with open("test/test.json") as json_file:
+ test_shout = json.load(json_file)["data"]["loadShout"]
+ test_shout["createdAt"] = datetime.fromisoformat(test_shout["createdAt"])
+ test_shout["publishedAt"] = datetime.fromisoformat(test_shout["publishedAt"])
+ print(test_shout)
+ return test_shout
+
with local_session() as session:
q = select(Shout).options(
joinedload(Shout.authors),
diff --git a/services/notifications/notification_service.py b/services/notifications/notification_service.py
index e1109cff..1d94ab9c 100644
--- a/services/notifications/notification_service.py
+++ b/services/notifications/notification_service.py
@@ -142,7 +142,7 @@ class NewReactionNotificator:
class NotificationService:
def __init__(self):
- self._queue = asyncio.Queue()
+ self._queue = asyncio.Queue(maxsize=1000)
async def handle_new_reaction(self, reaction_id):
notificator = NewReactionNotificator(reaction_id)
diff --git a/test/test.json b/test/test.json
index 0e5ee855..09b55665 100644
--- a/test/test.json
+++ b/test/test.json
@@ -7,10 +7,10 @@
"description": "Оглавление Многообещающее начало Тухлый финал Предыстория конфликта Пригожина и Минобороны Расклад сил в момент мятежа Теории заговора Сообщники Пригожина в элитах О чем договорились с Пригожиным Разочарование со всех сторон Как закрывали уголовное дело Последствия для Пригожина Последствия для ЧВК Последствия для российской бюрократии и Путина Вышел...",
"visibility": "community",
"subtitle": "Исчерпывающий разбор причин, хода и последствий",
- "slug": "kaktankivtsirkezastrevaliirylisiyamypodmoskvoy",
+ "slug": "testtesttest",
"layout": "article",
- "cover": "http://cdn.discours.io/caf24deb-c415-49ef-8404-418455c57c5c.webp",
- "body": "Вышел Путин на крыльцо,
Потеряв вконец лицо.
Об опасности конца
Говорил с того крыльца,
Про предателей, про бунт,
О вреде военных хунт,
Про гражданскую войну,
Про несчастную страну,
Положив на музыкантов
Вот за это всю вину.
К сожаленью президент,
Запилив такой контент,
Не сдержавшись в выраженьях,
Упустил такой момент:
Чтобы кресло сохранить,
Нужно меньше говорить,
Как тебя на этом кресле
Не проблемно заменить.
Автор неизвестен
В России вещи, о которых трубят из каждого утюга, все равно происходят неожиданно. Долго говорили, насколько невероятна война с Украиной, а это случилось. Говорили о том, что частные армии опасны для государственной бюрократии, — начался военный мятеж. Шутили «будем бомбить Воронеж» (не смотри, что в анекдоте) — и это тоже случилось. Говорили, что рано или поздно люди из системы начнут жрать друг друга, — и вот вчерашний герой Пригожин уже вымарывается из российской истории.
Многообещающее начало
23 июня Евгений Пригожин начал вооруженный мятеж после того, как министр обороны Сергей Шойгу потребовал, чтобы наемники ЧВК «Вагнер» подписали контракты с Минобороны до 1 июля. То есть попытался лишить Пригожина его кормовой и силовой базы в виде частной армии.
По версии Пригожина, Минобороны нанесло ракетный удар по лагерю «Вагнера», а также направило спецназ для захвата его самого. Однако, как выяснилось, о начавшемся отходе «вагнеров» из захваченного Бахмута и готовящемся мятеже уже 22 июня знала ФСБ из официального письма заместителя Пригожина в ЧВК Андрея Трошева. В США и вовсе заявили, что наблюдали за подготовкой мятежа две недели. О том же сообщила немецкая разведка. И, наконец, провалившееся задержание Пригожина должно было состояться не в лагере наемников, а в Санкт-Петербурге.
Военный мятеж предварялся обращением Пригожина в телеграм, в котором он открыл общественности секрет Полишинеля. В частности, обвинил руководство Минобороны в развале армии, рассказал, что захват Украины нужен был для распила российскими олигархами бизнеса на новых территориях, как это было на Донбассе, заявил, что пора покончить с обманом и коррупцией в стране, и потребовал выдать ему министра обороны Шойгу и главу генштаба Герасимова.
Шойгу спешно свалил из Ростова. Сам город и военные объекты Ростовской области были заняты «Вагнером».
Нужно ли говорить, что все полицейские разбежались, решив, что на этом их полномочия — всё. Такой серьезный митинг разогнать шокерами и дубинками решительно нельзя.
В Кремле едва успевали подносить и опорожнять чемоданчики. Ведь Путин не испытывал подобных стрессов со времен Болотной площади, когда реально испугался потери власти, после чего стал превращать правоохранительную систему в политическую полицию, создал Росгвардию и «заболел цифровизацией» как инструментом тотальной слежки за гражданами. Гражданское общество с белыми ленточками подавили, но беда пришла со стороны людей с шевронами «Наш бизнес — смерть, и бизнес идет хорошо». Страшно, очень страшно.
Путин записал обращение, в котором назвал наемников предателями, обещал неминуемое наказание (которое таки минуло) и вспомнил 1917 год.
Услышав про 1917 год, все, кроме «болота», в течение суток ждали досрочного прекращения полномочий президента. Правящая элита, включая Путина, покинула Москву. Косплеить украинское руководство и записывать ролики на Красной площади не стали. В Москве остался только Володин. Когда все утихло, он решил повысить свой аппаратный вес и призвал наказать бежавших. То есть почти всю верхушку страны. А в ней, между прочим, олигархи путинской волны, друзья детства, кооператив «Озеро» и всё, что навевает теплые воспоминания из прошлого.
Отвечая на обращение Путина, Пригожин неосторожно заявил, что президент ошибается, и мятеж — это не мятеж, а «марш справедливости». При этом глава ЧВК требовал, чтобы никто не сопротивлялся колоннам наемников, движущимся на Москву, а любой, кто встанет на пути, будет уничтожен. Потому что никто не встанет на пути у справедливости.
Глава ЧВК требовал, чтобы никто не сопротивлялся колоннам наемников, движущимся на Москву, а любой, кто встанет на пути, будет уничтожен / Скриншот из обращения Пригожина из Ростова / fedpress.ru 
После некоторой фрустрации ФСБ очухалась и забегала по военкоматам, собирая информацию о женах и родственниках «вагнеров». Под Москвой начали разрывать экскаваторами дороги и выставлять грузовики с песком. Кадыров заверил Путина в своей преданности и отправил в направлении Ростова батальон «Ахмат», который в очередной раз весьма благоразумно не доехал до точки соприкосновения.
Тухлый финал
Вечером 24 июня, когда колонна «Вагнера» была в 200 км от Москвы, Пригожин решил развернуть колонну и вернуться в полевые лагеря во избежание кровопролития (умолчав о куче перебитой российской авиации с РЭБ и ее экипажах).
Ответственность за срыв мятежа взял на себя Лукашенко и сымитировал переговоры с Пригожиным, передав тому предложения Путина, который не осмелился лично ответить на звонок мятежника. Лукашенко с радостью вписался во что-то более легитимирующее его шаткую власть, чем осмотр «обосранных» коров в колхозах.
Позже Песков сообщил, что Пригожин уезжает в Беларусь, а те «вагнера», которые на участвовали в мятеже, могут заключить контракты с Минобороны. В Беларуси был раскинут лагерь на 8 тысяч человек.
У Путина от избытка адреналина развязался язык. Он провел открытое совещание Совбеза, записывал обращения, рассказывал о попытке начать гражданскую войну, клеймил предателей, благодарил всех, кто не разбежался. И, наконец, сдал все пароли и явки, заявив, что за год государство потратило на «Вагнер» и Пригожина 276 млрд рублей. Позже пропагандист Дмитрий Киселев назвал цифру в 858 млрд, которые Пригожин получил через холдинг «Конкорд».
Одна из перекопанный дорог, которая должна была усложнить поход «вагнеровцев» на Москву / Фото: соцсети, Липецкая область 
Все бы ничего, ведь активная часть гражданского общества обо всем и так знала. И о Сирии, и об Африке, и об Украине. Но Путин забылся и разоткровенничался перед своим ядерным электоратом, тем самым «болотом», которое смотрит телик, мало осведомлено о ЧВК, верит в сильного президента и патриотическую сплоченность. А теперь им рассказали, что государство финансирует через левые схемы частные военизированные формирования, которые ставят страну на грань гражданской войны.
Президент теперь не находится над схваткой, а является ее частью, и спасает его Лукашенко, который всеми силами демонстрирует, что его яйца крепче, чем картофель и покрышка БелАЗа.
Главу Росгвардии Золотова наградили за защиту Москвы, которая не состоялась. А самой Росгвардии обещали выдать танки и прочую тяжелую технику, которая теперь не отправится на фронт. Если будет выдана. Видимо, ожидают повторного марша государственных и полугосударственных военных на Москву.
Так феодализм оформился и в военной сфере: армия против Украины, другая армия против этой армии, региональные армии на случай войны с федералами и частные армии на случай войны с конкурирующими корпорациями за активы. Не удивительно, что Пригожина возмутило, что его хотят лишить своей армии, когда у всех уважаемых людей она есть.
Уголовное дело против Пригожина было юридически неграмотно прекращено, несмотря на убитых «вагнерами» летчиков, которых Путин почтил минутой молчания, выступая на крыльце Грановитой палаты Кремля перед сотрудниками ФСО и военным руководством.
В частности, 28 июня сообщили, что арестован генерал Суровикин, лоббист «Вагнера» в Министерстве обороны, несмотря на то что осудил мятеж после его начала, записав соответствующее видеообращение при неустановленных обстоятельствах. Правозащитник Ольга Романова рассказала, что в СИЗО «Лефортово» была принята и передана задержанному открытка, отправленная на имя Суровикина С. В. Предположительно, сейчас Суровикин находится под другой мерой пресечения — запретом на совершение определенных действий.
Неизвестна судьба генерала Мизинцева, который до увольнения из Минобороны обеспечивал серые поставки «вагнерам» боеприпасов во время войны с Украиной, за что был уволен и немедленно трудоустроен заместителем в ЧВК «Вагнер».
В течение недели после мятежа начались чистки в Минобороны.
Бизнес-империю Пригожина начали рушить, включая его силовые, медийные и чисто коммерческие ресурсы. Его репутацию тоже уничтожают. Пропагандисты на федеральных каналах развернулись на 180 градусов, клеймят предателя и рассказывают от том, насколько преувеличена роль «Вагнера» на фронте.
И, конечно же, показывают «глубинному народу» материалы обысков во дворце Пригожина с найденными в нем наградным оружием, париками для маскировки и, по неподтвержденным данным, костюмом Папы Римского.
Утверждается, что в ходе обысков у Пригожина нашли его фотографии в различных обличьях / Коллаж: topcor.ru 
Предыстория конфликта Пригожина и Минобороны
На протяжении 2023 года в военной и чекистской бюрократии устоялась концепция того, что зарвавшегося Пригожина (выскочку, человека не из системы, с чрезмерными политическими амбициями) готовят на заклание. Слишком быстрый рост популярности при отсутствии аппаратного веса. Или, если короче, «кто он вообще такой, чтобы так борзеть?».
Минобороны ограничивало снабжение ЧВК боеприпасами, минировало пути отхода «Вагнера» из Бахмута и принуждало наемников заключить контракты с Минобороны. То есть пыталось лишить Пригожина его собственной пирамиды, на вершине которой он таки имел аппаратный вес. Но этот аппарат слишком обособился от военной бюрократии. Нарушил пресловутую монополию государства на легальное насилие. Опасно.
Обнулять «Вагнер» Шойгу начал еще во время сирийской кампании, где Россия помогала Башару Асаду сохранить свою диктаторскую власть.
По воспоминаниям корреспондента пригожинской пропагандистской помойки РИА «ФАН» Кирилла Романовского, весной 2016 года, после взятия наемниками Пальмиры, Шойгу заявил, что какие-то гопники не могут получать государственные награды РФ. И раздал награды своим гопникам из Минобороны.
Во времена этой же кампании случилось уничтожение 200 «вагнеров», шедших на захват нефтеперерабатывающего завода. На запрос США: «Это ваши?» — Минобороны ответило: «Не, не наши». Американцы пожали плечами и нанесли по колонне авиаудар, полностью очистивший ландшафт от всей имеющейся на нем фауны.
Список
Список
Список
Раз
Два
Три
Понимая, куда все движется, длительное время Пригожин как когда-то генералиссимус Валленштейн (тоже владевший частной армией) находился в полевых лагерях, откуда критиковал государственную армию, заверяя императора в том, что будет воевать в его интересах, но по своему усмотрению.
Как и для Валленштейна, для Пригожина частная армия являлась единственным гарантом выживания в борьбе с тяжеловесами из государственной бюрократии — Шойгу и Герасимовым. Те не забыли оскорблений Пригожина и долго низводили численный состав «Вагнера» к минимуму, перекрыв доступ к вербовке зеков, держа наемников на передней линии фронта для перемалывания их руками ВСУ и, наконец, требуя перейти на контракты с Минобороны.
Сообщники Пригожина в элитах
А что насчет сообщников, единомышленников или по крайней мере сочувствующих Пригожину в государственной бюрократии? Можно говорить о ситуативном содействии отдельных чиновников Пригожину, но не о спланированном мятеже с целью смены высших должностных лиц, включая президента.
Поскольку государство авторитарное, кажется, что у него единый центр принятия решений. Эта иллюзия заставляет думать, что все происходящее — это часть некоего плана.
Тут случился треш)))

Читайте также
Право народа на восстание. Можно ли защищать демократию силой?
Как Пригожин вербовал заключенных на войну. Репортаж из колонии о приезде основателя ЧВК «Вагнер»
«Вы — пушечное мясо». Почему российские власти творят всякий треш?
«Они хотят вырваться из русской тюрьмы». Ольга Романова о заключенных на фронте и новых законах после мятежа Пригожина
«Я не могу желать поражения русской армии». Почему националисты и нацболы не выступают против войны в Украине?
Цитата любопытно смещает эмбед
А текст после цитаты пишется здесь
",
+ "cover": "https://cdn.discours.io/caf24deb-c415-49ef-8404-418455c57c5c.webp",
+ "body": "Вышел Путин на крыльцо,
Потеряв вконец лицо.
Об опасности конца
Говорил с того крыльца,
Про предателей, про бунт,
О вреде военных хунт,
Про гражданскую войну,
Про несчастную страну,
Положив на музыкантов
Вот за это всю вину.
К сожаленью президент,
Запилив такой контент,
Не сдержавшись в выраженьях,
Упустил такой момент:
Чтобы кресло сохранить,
Нужно меньше говорить,
Как тебя на этом кресле
Не проблемно заменить.
Автор неизвестен
В России вещи, о которых трубят из каждого утюга, все равно происходят неожиданно. Долго говорили, насколько невероятна война с Украиной, а это случилось. Говорили о том, что частные армии опасны для государственной бюрократии, — начался военный мятеж. Шутили «будем бомбить Воронеж» (не смотри, что в анекдоте) — и это тоже случилось. Говорили, что рано или поздно люди из системы начнут жрать друг друга, — и вот вчерашний герой Пригожин уже вымарывается из российской истории.
Многообещающее начало
23 июня Евгений Пригожин начал вооруженный мятеж после того, как министр обороны Сергей Шойгу потребовал, чтобы наемники ЧВК «Вагнер» подписали контракты с Минобороны до 1 июля. То есть попытался лишить Пригожина его кормовой и силовой базы в виде частной армии.
По версии Пригожина, Минобороны нанесло ракетный удар по лагерю «Вагнера», а также направило спецназ для захвата его самого. Однако, как выяснилось, о начавшемся отходе «вагнеров» из захваченного Бахмута и готовящемся мятеже уже 22 июня знала ФСБ из официального письма заместителя Пригожина в ЧВК Андрея Трошева. В США и вовсе заявили, что наблюдали за подготовкой мятежа две недели. О том же сообщила немецкая разведка. И, наконец, провалившееся задержание Пригожина должно было состояться не в лагере наемников, а в Санкт-Петербурге.
Военный мятеж предварялся обращением Пригожина в телеграм, в котором он открыл общественности секрет Полишинеля. В частности, обвинил руководство Минобороны в развале армии, рассказал, что захват Украины нужен был для распила российскими олигархами бизнеса на новых территориях, как это было на Донбассе, заявил, что пора покончить с обманом и коррупцией в стране, и потребовал выдать ему министра обороны Шойгу и главу генштаба Герасимова.
Шойгу спешно свалил из Ростова. Сам город и военные объекты Ростовской области были заняты «Вагнером».
Нужно ли говорить, что все полицейские разбежались, решив, что на этом их полномочия — всё. Такой серьезный митинг разогнать шокерами и дубинками решительно нельзя.
В Кремле едва успевали подносить и опорожнять чемоданчики. Ведь Путин не испытывал подобных стрессов со времен Болотной площади, когда реально испугался потери власти, после чего стал превращать правоохранительную систему в политическую полицию, создал Росгвардию и «заболел цифровизацией» как инструментом тотальной слежки за гражданами. Гражданское общество с белыми ленточками подавили, но беда пришла со стороны людей с шевронами «Наш бизнес — смерть, и бизнес идет хорошо». Страшно, очень страшно.
Путин записал обращение, в котором назвал наемников предателями, обещал неминуемое наказание (которое таки минуло) и вспомнил 1917 год.
Услышав про 1917 год, все, кроме «болота», в течение суток ждали досрочного прекращения полномочий президента. Правящая элита, включая Путина, покинула Москву. Косплеить украинское руководство и записывать ролики на Красной площади не стали. В Москве остался только Володин. Когда все утихло, он решил повысить свой аппаратный вес и призвал наказать бежавших. То есть почти всю верхушку страны. А в ней, между прочим, олигархи путинской волны, друзья детства, кооператив «Озеро» и всё, что навевает теплые воспоминания из прошлого.
Отвечая на обращение Путина, Пригожин неосторожно заявил, что президент ошибается, и мятеж — это не мятеж, а «марш справедливости». При этом глава ЧВК требовал, чтобы никто не сопротивлялся колоннам наемников, движущимся на Москву, а любой, кто встанет на пути, будет уничтожен. Потому что никто не встанет на пути у справедливости.
Глава ЧВК требовал, чтобы никто не сопротивлялся колоннам наемников, движущимся на Москву, а любой, кто встанет на пути, будет уничтожен / Скриншот из обращения Пригожина из Ростова / fedpress.ru 
После некоторой фрустрации ФСБ очухалась и забегала по военкоматам, собирая информацию о женах и родственниках «вагнеров». Под Москвой начали разрывать экскаваторами дороги и выставлять грузовики с песком. Кадыров заверил Путина в своей преданности и отправил в направлении Ростова батальон «Ахмат», который в очередной раз весьма благоразумно не доехал до точки соприкосновения.
Тухлый финал
Вечером 24 июня, когда колонна «Вагнера» была в 200 км от Москвы, Пригожин решил развернуть колонну и вернуться в полевые лагеря во избежание кровопролития (умолчав о куче перебитой российской авиации с РЭБ и ее экипажах).
Ответственность за срыв мятежа взял на себя Лукашенко и сымитировал переговоры с Пригожиным, передав тому предложения Путина, который не осмелился лично ответить на звонок мятежника. Лукашенко с радостью вписался во что-то более легитимирующее его шаткую власть, чем осмотр «обосранных» коров в колхозах.
Позже Песков сообщил, что Пригожин уезжает в Беларусь, а те «вагнера», которые не участвовали в мятеже, могут заключить контракты с Минобороны. В Беларуси был раскинут лагерь на 8 тысяч человек.
У Путина от избытка адреналина развязался язык. Он провел открытое совещание Совбеза, записывал обращения, рассказывал о попытке начать гражданскую войну, клеймил предателей, благодарил всех, кто не разбежался. И, наконец, сдал все пароли и явки, заявив, что за год государство потратило на «Вагнер» и Пригожина 276 млрд рублей. Позже пропагандист Дмитрий Киселев назвал цифру в 858 млрд, которые Пригожин получил через холдинг «Конкорд».
Одна из перекопанных дорог, которая должна была усложнить поход «вагнеровцев» на Москву / Фото: соцсети, Липецкая область 
Все бы ничего, ведь активная часть гражданского общества обо всем и так знала. И о Сирии, и об Африке, и об Украине. Но Путин забылся и разоткровенничался перед своим ядерным электоратом, тем самым «болотом», которое смотрит телик, мало осведомлено о ЧВК, верит в сильного президента и патриотическую сплоченность. А теперь им рассказали, что государство финансирует через левые схемы частные военизированные формирования, которые ставят страну на грань гражданской войны.
Президент теперь не находится над схваткой, а является ее частью, и спасает его Лукашенко, который всеми силами демонстрирует, что его яйца крепче, чем картофель и покрышка БелАЗа.
Главу Росгвардии Золотова наградили за защиту Москвы, которая не состоялась. А самой Росгвардии обещали выдать танки и прочую тяжелую технику, которая теперь не отправится на фронт. Если будет выдана. Видимо, ожидают повторного марша государственных и полугосударственных военных на Москву.
Так феодализм оформился и в военной сфере: армия против Украины, другая армия против этой армии, региональные армии на случай войны с федералами и частные армии на случай войны с конкурирующими корпорациями за активы. Не удивительно, что Пригожина возмутило, что его хотят лишить своей армии, когда у всех уважаемых людей она есть.
Уголовное дело против Пригожина было юридически неграмотно прекращено, несмотря на убитых «вагнерами» летчиков, которых Путин почтил минутой молчания, выступая на крыльце Грановитой палаты Кремля перед сотрудниками ФСО и военным руководством.
В частности, 28 июня сообщили, что арестован генерал Суровикин, лоббист «Вагнера» в Министерстве обороны, несмотря на то что осудил мятеж после его начала, записав соответствующее видеообращение при неустановленных обстоятельствах. Правозащитник Ольга Романова рассказала, что в СИЗО «Лефортово» была принята и передана задержанному открытка, отправленная на имя Суровикина С. В. Предположительно, сейчас Суровикин находится под другой мерой пресечения — запретом на совершение определенных действий.
Неизвестна судьба генерала Мизинцева, который до увольнения из Минобороны обеспечивал серые поставки «вагнерам» боеприпасов во время войны с Украиной, за что был уволен и немедленно трудоустроен заместителем в ЧВК «Вагнер».
В течение недели после мятежа начались чистки в Минобороны.
Бизнес-империю Пригожина начали рушить, включая его силовые, медийные и чисто коммерческие ресурсы. Его репутацию тоже уничтожают. Пропагандисты на федеральных каналах развернулись на 180 градусов, клеймят предателя и рассказывают о том, насколько преувеличена роль «Вагнера» на фронте.
И, конечно же, показывают «глубинному народу» материалы обысков во дворце Пригожина с найденными в нем наградным оружием, париками для маскировки и, по неподтвержденным данным, костюмом Папы Римского.
Утверждается, что в ходе обысков у Пригожина нашли его фотографии в различных обличьях / Коллаж: topcor.ru 
Предыстория конфликта Пригожина и Минобороны
На протяжении 2023 года в военной и чекистской бюрократии устоялась концепция того, что зарвавшегося Пригожина (выскочку, человека не из системы, с чрезмерными политическими амбициями) готовят на заклание. Слишком быстрый рост популярности при отсутствии аппаратного веса. Или, если короче, «кто он вообще такой, чтобы так борзеть?».
Минобороны ограничивало снабжение ЧВК боеприпасами, минировало пути отхода «Вагнера» из Бахмута и принуждало наемников заключить контракты с Минобороны. То есть пыталось лишить Пригожина его собственной пирамиды, на вершине которой он таки имел аппаратный вес. Но этот аппарат слишком обособился от военной бюрократии. Нарушил пресловутую монополию государства на легальное насилие. Опасно.
Обнулять «Вагнер» Шойгу начал еще во время сирийской кампании, где Россия помогала Башару Асаду сохранить свою диктаторскую власть.
По воспоминаниям корреспондента пригожинской пропагандистской помойки РИА «ФАН» Кирилла Романовского, весной 2016 года, после взятия наемниками Пальмиры, Шойгу заявил, что какие-то гопники не могут получать государственные награды РФ. И раздал награды своим гопникам из Минобороны.
Во времена этой же кампании случилось уничтожение 200 «вагнеров», шедших на захват нефтеперерабатывающего завода. На запрос США: «Это ваши?» — Минобороны ответило: «Не, не наши». Американцы пожали плечами и нанесли по колонне авиаудар, полностью очистивший ландшафт от всей имеющейся на нем фауны.
Список
Список
Список
Раз
Два
Три
Понимая, куда все движется, длительное время Пригожин как когда-то генералиссимус Валленштейн (тоже владевший частной армией) находился в полевых лагерях, откуда критиковал государственную армию, заверяя императора в том, что будет воевать в его интересах, но по своему усмотрению.
Как и для Валленштейна, для Пригожина частная армия являлась единственным гарантом выживания в борьбе с тяжеловесами из государственной бюрократии — Шойгу и Герасимовым. Те не забыли оскорблений Пригожина и долго низводили численный состав «Вагнера» к минимуму, перекрыв доступ к вербовке зеков, держа наемников на передней линии фронта для перемалывания их руками ВСУ и, наконец, требуя перейти на контракты с Минобороны.
Сообщники Пригожина в элитах
А что насчет сообщников, единомышленников или по крайней мере сочувствующих Пригожину в государственной бюрократии? Можно говорить о ситуативном содействии отдельных чиновников Пригожину, но не о спланированном мятеже с целью смены высших должностных лиц, включая президента.
Поскольку государство авторитарное, кажется, что у него единый центр принятия решений. Эта иллюзия заставляет думать, что все происходящее — это часть некоего плана.
Тут случился треш)))

Читайте также
Право народа на восстание. Можно ли защищать демократию силой?
Как Пригожин вербовал заключенных на войну. Репортаж из колонии о приезде основателя ЧВК «Вагнер»
«Вы — пушечное мясо». Почему российские власти творят всякий треш?
«Они хотят вырваться из русской тюрьмы». Ольга Романова о заключенных на фронте и новых законах после мятежа Пригожина
«Я не могу желать поражения русской армии». Почему националисты и нацболы не выступают против войны в Украине?
Цитата любопытно смещает эмбед
А текст после цитаты пишется здесь
",
"media": null,
"mainTopic": "politics",
"topics": [
From 2679b2c8737151344957dd3969d50343d6b970a0 Mon Sep 17 00:00:00 2001
From: Ilya Y <75578537+ilya-bkv@users.noreply.github.com>
Date: Mon, 6 Nov 2023 12:03:04 +0300
Subject: [PATCH 16/27] debug code removed (#105)
Co-authored-by: Igor Lobanov
---
resolvers/zine/load.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 8defe383..4db893a9 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -86,7 +86,6 @@ async def load_shout(_, info, slug=None, shout_id=None):
test_shout = json.load(json_file)["data"]["loadShout"]
test_shout["createdAt"] = datetime.fromisoformat(test_shout["createdAt"])
test_shout["publishedAt"] = datetime.fromisoformat(test_shout["publishedAt"])
- print(test_shout)
return test_shout
with local_session() as session:
From 0e9f0b0682d43be79416d61dfc7a44bf477a33ec Mon Sep 17 00:00:00 2001
From: Ilya Y <75578537+ilya-bkv@users.noreply.github.com>
Date: Wed, 8 Nov 2023 21:12:55 +0300
Subject: [PATCH 17/27] Feature/google oauth (#106)
google oauth
---------
Co-authored-by: Igor Lobanov
---
auth/identity.py | 15 +++------------
auth/oauth.py | 21 ++++++++++++++++-----
main.py | 3 ---
requirements.txt | 2 +-
resolvers/auth.py | 24 ++----------------------
settings.py | 2 +-
6 files changed, 23 insertions(+), 44 deletions(-)
diff --git a/auth/identity.py b/auth/identity.py
index 7f704eff..c82e0b7a 100644
--- a/auth/identity.py
+++ b/auth/identity.py
@@ -3,7 +3,6 @@ from hashlib import sha256
from jwt import DecodeError, ExpiredSignatureError
from passlib.hash import bcrypt
-from sqlalchemy import or_
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
@@ -11,7 +10,6 @@ from auth.tokenstorage import TokenStorage
# from base.exceptions import InvalidPassword, InvalidToken
from base.orm import local_session
from orm import User
-from validations.auth import AuthInput
class Password:
@@ -65,20 +63,13 @@ class Identity:
return user
@staticmethod
- def oauth(inp: AuthInput) -> User:
+ def oauth(inp) -> User:
with local_session() as session:
- user = (
- session.query(User)
- .filter(or_(User.oauth == inp["oauth"], User.email == inp["email"]))
- .first()
- )
+ user = session.query(User).filter(User.email == inp["email"]).first()
if not user:
- user = User.create(**inp)
- if not user.oauth:
- user.oauth = inp["oauth"]
+ user = User.create(**inp, emailConfirmed=True)
session.commit()
- user = User(**user.dict())
return user
@staticmethod
diff --git a/auth/oauth.py b/auth/oauth.py
index 02f56ff5..c85e22b7 100644
--- a/auth/oauth.py
+++ b/auth/oauth.py
@@ -33,16 +33,25 @@ oauth.register(
oauth.register(
name="google",
- client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
- client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
+ # client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
+ # client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
+ client_id="648983473866-2hd6v2eqqk6hhqabfhuqq2slb2fkfvve.apps.googleusercontent.com",
+ client_secret="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",
server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
client_kwargs={"scope": "openid email profile"},
+ authorize_state="test",
)
async def google_profile(client, request, token):
- profile = await client.parse_id_token(request, token)
- profile["id"] = profile["sub"]
+ userinfo = token["userinfo"]
+
+ profile = {"name": userinfo["name"], "email": userinfo["email"], "id": userinfo["sub"]}
+
+ if userinfo["picture"]:
+ userpic = userinfo["picture"].replace("=s96", "=s600")
+ profile["userpic"] = userpic
+
return profile
@@ -67,7 +76,8 @@ async def oauth_login(request):
provider = request.path_params["provider"]
request.session["provider"] = provider
client = oauth.create_client(provider)
- redirect_uri = "https://v2.discours.io/oauth-authorize"
+ # redirect_uri = "http://v2.discours.io/oauth-authorize"
+ redirect_uri = "http://localhost:8080/oauth-authorize"
return await client.authorize_redirect(request, redirect_uri)
@@ -82,6 +92,7 @@ async def oauth_authorize(request):
"oauth": user_oauth_info,
"email": profile["email"],
"username": profile["name"],
+ "userpic": profile["userpic"],
}
user = Identity.oauth(user_input)
session_token = await TokenStorage.create_session(user)
diff --git a/main.py b/main.py
index eb21b15c..27cac2cc 100644
--- a/main.py
+++ b/main.py
@@ -16,7 +16,6 @@ from auth.oauth import oauth_authorize, oauth_login
from base.redis import redis
from base.resolvers import resolvers
from orm import init_tables
-from resolvers.auth import confirm_email_handler
from resolvers.upload import upload_handler
from services.main import storages_init
from services.notifications.notification_service import notification_service
@@ -71,10 +70,8 @@ async def shutdown():
routes = [
- # Route("/messages", endpoint=sse_messages),
Route("/oauth/{provider}", endpoint=oauth_login),
Route("/oauth-authorize", endpoint=oauth_authorize),
- Route("/confirm/{token}", endpoint=confirm_email_handler),
Route("/upload", endpoint=upload_handler, methods=["POST"]),
Route("/subscribe/{user_id}", endpoint=sse_subscribe_handler),
]
diff --git a/requirements.txt b/requirements.txt
index af3eee5f..6ab7bcef 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,7 @@ aioredis~=2.0.1
alembic==1.11.3
ariadne>=0.17.0
asyncio~=3.4.3
-authlib>=1.1.0
+authlib==1.2.1
bcrypt>=4.0.0
beautifulsoup4~=4.11.1
boto3~=1.28.2
diff --git a/resolvers/auth.py b/resolvers/auth.py
index 4900e6c0..ed754044 100644
--- a/resolvers/auth.py
+++ b/resolvers/auth.py
@@ -5,7 +5,6 @@ from datetime import datetime, timezone
from urllib.parse import quote_plus
from graphql.type import GraphQLResolveInfo
-from starlette.responses import RedirectResponse
from transliterate import translit
from auth.authenticate import login_required
@@ -14,17 +13,11 @@ from auth.email import send_auth_email
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
-from base.exceptions import (
- BaseHttpException,
- InvalidPassword,
- InvalidToken,
- ObjectNotExist,
- Unauthorized,
-)
+from base.exceptions import InvalidPassword, InvalidToken, ObjectNotExist, Unauthorized
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Role, User
-from settings import FRONTEND_URL, SESSION_TOKEN_HEADER
+from settings import SESSION_TOKEN_HEADER
@mutation.field("getSession")
@@ -64,19 +57,6 @@ async def confirm_email(_, info, token):
return {"error": "email is not confirmed"}
-async def confirm_email_handler(request):
- token = request.path_params["token"] # one time
- request.session["token"] = token
- res = await confirm_email(None, {}, token)
- print("[resolvers.auth] confirm_email request: %r" % request)
- if "error" in res:
- raise BaseHttpException(res["error"])
- else:
- response = RedirectResponse(url=FRONTEND_URL)
- response.set_cookie("token", res["token"]) # session token
- return response
-
-
def create_user(user_dict):
user = User(**user_dict)
with local_session() as session:
diff --git a/settings.py b/settings.py
index f3da9952..89739c80 100644
--- a/settings.py
+++ b/settings.py
@@ -23,7 +23,7 @@ for provider in OAUTH_PROVIDERS:
"id": environ.get(provider + "_OAUTH_ID"),
"key": environ.get(provider + "_OAUTH_KEY"),
}
-FRONTEND_URL = environ.get("FRONTEND_URL") or "http://localhost:3000"
+FRONTEND_URL = environ.get("FRONTEND_URL") or "https://localhost:3000"
SHOUTS_REPO = "content"
SESSION_TOKEN_HEADER = "Authorization"
From a2b47dab66dd718501598f2a053add0124eae6f4 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Wed, 8 Nov 2023 19:19:20 +0100
Subject: [PATCH 18/27] google oauth fix
---
auth/oauth.py | 9 +++------
1 file changed, 3 insertions(+), 6 deletions(-)
diff --git a/auth/oauth.py b/auth/oauth.py
index c85e22b7..7021ecb7 100644
--- a/auth/oauth.py
+++ b/auth/oauth.py
@@ -33,10 +33,8 @@ oauth.register(
oauth.register(
name="google",
- # client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
- # client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
- client_id="648983473866-2hd6v2eqqk6hhqabfhuqq2slb2fkfvve.apps.googleusercontent.com",
- client_secret="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",
+ client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
+ client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
client_kwargs={"scope": "openid email profile"},
authorize_state="test",
@@ -76,8 +74,7 @@ async def oauth_login(request):
provider = request.path_params["provider"]
request.session["provider"] = provider
client = oauth.create_client(provider)
- # redirect_uri = "http://v2.discours.io/oauth-authorize"
- redirect_uri = "http://localhost:8080/oauth-authorize"
+ redirect_uri = "http://v2.discours.io/oauth-authorize"
return await client.authorize_redirect(request, redirect_uri)
From 34e18317a2bb5983d801a888e1e6f5b0a097a105 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Wed, 8 Nov 2023 19:24:38 +0100
Subject: [PATCH 19/27] google oauth fix
---
auth/oauth.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/auth/oauth.py b/auth/oauth.py
index 7021ecb7..25cc280a 100644
--- a/auth/oauth.py
+++ b/auth/oauth.py
@@ -74,7 +74,7 @@ async def oauth_login(request):
provider = request.path_params["provider"]
request.session["provider"] = provider
client = oauth.create_client(provider)
- redirect_uri = "http://v2.discours.io/oauth-authorize"
+ redirect_uri = "https://v2.discours.io/oauth-authorize"
return await client.authorize_redirect(request, redirect_uri)
From b63b6e7ee7beaafb05002d0c7d49d07a560c110b Mon Sep 17 00:00:00 2001
From: Ilya Y <75578537+ilya-bkv@users.noreply.github.com>
Date: Tue, 14 Nov 2023 14:56:41 +0300
Subject: [PATCH 20/27] timezones fixed once again (#107)
Co-authored-by: Igor Lobanov
---
orm/collection.py | 8 +++-----
orm/community.py | 12 +++++++-----
orm/notification.py | 7 ++++---
orm/reaction.py | 11 ++++++-----
orm/shout.py | 29 ++++++++++++++++++++---------
orm/topic.py | 8 ++++----
orm/user.py | 18 +++++++++++-------
7 files changed, 55 insertions(+), 38 deletions(-)
diff --git a/orm/collection.py b/orm/collection.py
index 8493844c..a8078867 100644
--- a/orm/collection.py
+++ b/orm/collection.py
@@ -1,6 +1,4 @@
-from datetime import datetime
-
-from sqlalchemy import Column, DateTime, ForeignKey, String
+from sqlalchemy import Column, DateTime, ForeignKey, String, func
from base.orm import Base
@@ -20,6 +18,6 @@ class Collection(Base):
title = Column(String, nullable=False, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
- createdAt = Column(DateTime, default=datetime.now, comment="Created At")
+ createdAt = Column(DateTime(timezone=True), server_default=func.now(), comment="Created At")
createdBy = Column(ForeignKey("user.id"), comment="Created By")
- publishedAt = Column(DateTime, default=datetime.now, comment="Published At")
+ publishedAt = Column(DateTime(timezone=True), server_default=func.now(), comment="Published At")
diff --git a/orm/community.py b/orm/community.py
index 4cbfcc7a..762fe154 100644
--- a/orm/community.py
+++ b/orm/community.py
@@ -1,6 +1,4 @@
-from datetime import datetime
-
-from sqlalchemy import Column, DateTime, ForeignKey, String
+from sqlalchemy import Column, DateTime, ForeignKey, String, func
from base.orm import Base, local_session
@@ -11,7 +9,9 @@ class CommunityFollower(Base):
id = None
follower: Column = Column(ForeignKey("user.id"), primary_key=True)
community: Column = Column(ForeignKey("community.id"), primary_key=True)
- joinedAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ joinedAt = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
+ )
# role = Column(ForeignKey(Role.id), nullable=False, comment="Role for member")
@@ -22,7 +22,9 @@ class Community(Base):
slug = Column(String, nullable=False, unique=True, comment="Slug")
desc = Column(String, nullable=False, default="")
pic = Column(String, nullable=False, default="")
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
+ )
@staticmethod
def init_table():
diff --git a/orm/notification.py b/orm/notification.py
index 429f07f2..8130b0bb 100644
--- a/orm/notification.py
+++ b/orm/notification.py
@@ -1,7 +1,6 @@
-from datetime import datetime
from enum import Enum as Enumeration
-from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer
+from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer, func
from sqlalchemy.dialects.postgresql import JSONB
from base.orm import Base
@@ -18,7 +17,9 @@ class Notification(Base):
shout: Column = Column(ForeignKey("shout.id"), index=True)
reaction: Column = Column(ForeignKey("reaction.id"), index=True)
user: Column = Column(ForeignKey("user.id"), index=True)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, index=True)
+ createdAt = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), index=True
+ )
seen = Column(Boolean, nullable=False, default=False, index=True)
type = Column(Enum(NotificationType), nullable=False)
data = Column(JSONB, nullable=True)
diff --git a/orm/reaction.py b/orm/reaction.py
index 38520b72..d5ed55cb 100644
--- a/orm/reaction.py
+++ b/orm/reaction.py
@@ -1,7 +1,6 @@
-from datetime import datetime
from enum import Enum as Enumeration
-from sqlalchemy import Column, DateTime, Enum, ForeignKey, String
+from sqlalchemy import Column, DateTime, Enum, ForeignKey, String, func
from base.orm import Base
@@ -27,13 +26,15 @@ class ReactionKind(Enumeration):
class Reaction(Base):
__tablename__ = "reaction"
body = Column(String, nullable=True, comment="Reaction Body")
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
+ )
createdBy: Column = Column(ForeignKey("user.id"), nullable=False, index=True, comment="Sender")
- updatedAt = Column(DateTime, nullable=True, comment="Updated at")
+ updatedAt = Column(DateTime(timezone=True), nullable=True, comment="Updated at")
updatedBy: Column = Column(
ForeignKey("user.id"), nullable=True, index=True, comment="Last Editor"
)
- deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
+ deletedAt = Column(DateTime(timezone=True), nullable=True, comment="Deleted at")
deletedBy: Column = Column(
ForeignKey("user.id"), nullable=True, index=True, comment="Deleted by"
)
diff --git a/orm/shout.py b/orm/shout.py
index b1300ec6..e753faa5 100644
--- a/orm/shout.py
+++ b/orm/shout.py
@@ -1,6 +1,13 @@
-from datetime import datetime
-
-from sqlalchemy import JSON, Boolean, Column, DateTime, ForeignKey, Integer, String
+from sqlalchemy import (
+ JSON,
+ Boolean,
+ Column,
+ DateTime,
+ ForeignKey,
+ Integer,
+ String,
+ func,
+)
from sqlalchemy.orm import column_property, relationship
from base.orm import Base, local_session
@@ -24,8 +31,10 @@ class ShoutReactionsFollower(Base):
follower: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
shout: Column = Column(ForeignKey("shout.id"), primary_key=True, index=True)
auto = Column(Boolean, nullable=False, default=False)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
- deletedAt = Column(DateTime, nullable=True)
+ createdAt = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
+ )
+ deletedAt = Column(DateTime(timezone=True), nullable=True)
class ShoutAuthor(Base):
@@ -41,10 +50,12 @@ class Shout(Base):
__tablename__ = "shout"
# timestamps
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
- updatedAt = Column(DateTime, nullable=True, comment="Updated at")
- publishedAt = Column(DateTime, nullable=True)
- deletedAt = Column(DateTime, nullable=True)
+ createdAt = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
+ )
+ updatedAt = Column(DateTime(timezone=True), nullable=True, comment="Updated at")
+ publishedAt = Column(DateTime(timezone=True), nullable=True)
+ deletedAt = Column(DateTime(timezone=True), nullable=True)
createdBy: Column = Column(ForeignKey("user.id"), comment="Created By")
deletedBy: Column = Column(ForeignKey("user.id"), nullable=True)
diff --git a/orm/topic.py b/orm/topic.py
index 375d5834..0b42d3cb 100644
--- a/orm/topic.py
+++ b/orm/topic.py
@@ -1,6 +1,4 @@
-from datetime import datetime
-
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String, func
from base.orm import Base
@@ -11,7 +9,9 @@ class TopicFollower(Base):
id = None
follower: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
topic: Column = Column(ForeignKey("topic.id"), primary_key=True, index=True)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
+ )
auto = Column(Boolean, nullable=False, default=False)
diff --git a/orm/user.py b/orm/user.py
index 5379b586..b95891a7 100644
--- a/orm/user.py
+++ b/orm/user.py
@@ -1,7 +1,5 @@
-from datetime import datetime
-
from sqlalchemy import JSON as JSONType
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, func
from sqlalchemy.orm import relationship
from base.orm import Base, local_session
@@ -35,7 +33,9 @@ class AuthorFollower(Base):
id = None
follower: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
author: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
+ )
auto = Column(Boolean, nullable=False, default=False)
@@ -53,9 +53,13 @@ class User(Base):
slug = Column(String, unique=True, comment="User's slug")
muted = Column(Boolean, default=False)
emailConfirmed = Column(Boolean, default=False)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
- lastSeen = Column(DateTime, nullable=False, default=datetime.now, comment="Was online at")
- deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
+ createdAt = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
+ )
+ lastSeen = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Was online at"
+ )
+ deletedAt = Column(DateTime(timezone=True), nullable=True, comment="Deleted at")
links = Column(JSONType, nullable=True, comment="Links")
oauth = Column(String, nullable=True)
ratings = relationship(UserRating, foreign_keys=UserRating.user)
From f9bc1d67aec780a3e14e1bd54f8d94bbbdfcb3fb Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Wed, 13 Dec 2023 23:56:01 +0100
Subject: [PATCH 21/27] random top articles query (#109)
* loadRandomTopShouts
* minor fixes
---------
Co-authored-by: Igor Lobanov
---
resolvers/zine/load.py | 150 ++++++++++++++++++++++++++---------------
schema.graphql | 12 +++-
2 files changed, 106 insertions(+), 56 deletions(-)
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 4db893a9..db1ee0e2 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -1,5 +1,5 @@
import json
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta
from sqlalchemy.orm import aliased, joinedload
from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select
@@ -15,6 +15,41 @@ from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.user import AuthorFollower
+def get_shouts_from_query(q):
+ shouts = []
+ with local_session() as session:
+ for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
+ q
+ ).unique():
+ shouts.append(shout)
+ shout.stat = {
+ "viewed": shout.views,
+ "reacted": reacted_stat,
+ "commented": commented_stat,
+ "rating": rating_stat,
+ }
+
+ return shouts
+
+
+def get_rating_func(aliased_reaction):
+ return func.sum(
+ case(
+ # do not count comments' reactions
+ (aliased_reaction.replyTo.is_not(None), 0),
+ (aliased_reaction.kind == ReactionKind.AGREE, 1),
+ (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
+ (aliased_reaction.kind == ReactionKind.PROOF, 1),
+ (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
+ (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
+ (aliased_reaction.kind == ReactionKind.REJECT, -1),
+ (aliased_reaction.kind == ReactionKind.LIKE, 1),
+ (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
+ else_=0,
+ )
+ )
+
+
def add_stat_columns(q):
aliased_reaction = aliased(Reaction)
@@ -23,21 +58,7 @@ def add_stat_columns(q):
func.sum(case((aliased_reaction.kind == ReactionKind.COMMENT, 1), else_=0)).label(
"commented_stat"
),
- func.sum(
- case(
- # do not count comments' reactions
- (aliased_reaction.replyTo.is_not(None), 0),
- (aliased_reaction.kind == ReactionKind.AGREE, 1),
- (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
- (aliased_reaction.kind == ReactionKind.PROOF, 1),
- (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
- (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
- (aliased_reaction.kind == ReactionKind.REJECT, -1),
- (aliased_reaction.kind == ReactionKind.LIKE, 1),
- (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
- else_=0,
- )
- ).label("rating_stat"),
+ get_rating_func(aliased_reaction).label("rating_stat"),
func.max(
case(
(aliased_reaction.kind != ReactionKind.COMMENT, None),
@@ -67,13 +88,14 @@ def apply_filters(q, filters, user_id=None): # noqa: C901
q = q.filter(Shout.authors.any(slug=filters.get("author")))
if filters.get("topic"):
q = q.filter(Shout.topics.any(slug=filters.get("topic")))
- if filters.get("title"):
- q = q.filter(Shout.title.ilike(f'%{filters.get("title")}%'))
- if filters.get("body"):
- q = q.filter(Shout.body.ilike(f'%{filters.get("body")}%s'))
- if filters.get("days"):
- before = datetime.now(tz=timezone.utc) - timedelta(days=int(filters.get("days")) or 30)
- q = q.filter(Shout.createdAt > before)
+ if filters.get("fromDate"):
+ # fromDate: '2022-12-31
+ date_from = datetime.strptime(filters.get("fromDate"), "%Y-%m-%d")
+ q = q.filter(Shout.createdAt >= date_from)
+ if filters.get("toDate"):
+ # toDate: '2023-12-31'
+ date_to = datetime.strptime(filters.get("toDate"), "%Y-%m-%d")
+ q = q.filter(Shout.createdAt < (date_to + timedelta(days=1)))
return q
@@ -136,7 +158,8 @@ async def load_shouts_by(_, info, options):
topic: 'culture',
title: 'something',
body: 'something else',
- days: 30
+ fromDate: '2022-12-31',
+ toDate: '2023-12-31'
}
offset: 0
limit: 50
@@ -169,23 +192,57 @@ async def load_shouts_by(_, info, options):
q = q.group_by(Shout.id).order_by(nulls_last(query_order_by)).limit(limit).offset(offset)
- shouts = []
- with local_session() as session:
- shouts_map = {}
+ return get_shouts_from_query(q)
- for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
- q
- ).unique():
- shouts.append(shout)
- shout.stat = {
- "viewed": shout.views,
- "reacted": reacted_stat,
- "commented": commented_stat,
- "rating": rating_stat,
- }
- shouts_map[shout.id] = shout
- return shouts
+@query.field("loadRandomTopShouts")
+async def load_random_top_shouts(_, info, params):
+ """
+ :param params: {
+ filters: {
+ layout: 'music',
+ excludeLayout: 'article',
+ fromDate: '2022-12-31'
+ toDate: '2023-12-31'
+ }
+ fromRandomCount: 100,
+ limit: 50
+ }
+ :return: Shout[]
+ """
+
+ aliased_reaction = aliased(Reaction)
+
+ subquery = (
+ select(Shout.id)
+ .outerjoin(aliased_reaction)
+ .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None)))
+ )
+
+ subquery = apply_filters(subquery, params.get("filters", {}))
+ subquery = subquery.group_by(Shout.id).order_by(desc(get_rating_func(aliased_reaction)))
+
+ from_random_count = params.get("fromRandomCount")
+ if from_random_count:
+ subquery = subquery.limit(from_random_count)
+
+ q = (
+ select(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ )
+ .where(Shout.id.in_(subquery))
+ )
+
+ q = add_stat_columns(q)
+
+ limit = params.get("limit", 10)
+ q = q.group_by(Shout.id).order_by(func.random()).limit(limit)
+
+ # print(q.compile(compile_kwargs={"literal_binds": True}))
+
+ return get_shouts_from_query(q)
@query.field("loadDrafts")
@@ -256,17 +313,4 @@ async def get_my_feed(_, info, options):
# print(q.compile(compile_kwargs={"literal_binds": True}))
- shouts = []
- with local_session() as session:
- for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(
- q
- ).unique():
- shouts.append(shout)
- shout.stat = {
- "viewed": shout.views,
- "reacted": reacted_stat,
- "commented": commented_stat,
- "rating": rating_stat,
- }
-
- return shouts
+ return get_shouts_from_query(q)
diff --git a/schema.graphql b/schema.graphql
index 79b26c0b..91aecb55 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -212,14 +212,13 @@ input AuthorsBy {
}
input LoadShoutsFilters {
- title: String
- body: String
topic: String
author: String
layout: String
excludeLayout: String
visibility: String
- days: Int
+ fromDate: String
+ toDate: String
reacted: Boolean
}
@@ -232,6 +231,12 @@ input LoadShoutsOptions {
order_by_desc: Boolean
}
+input LoadRandomTopShoutsParams {
+ filters: LoadShoutsFilters
+ limit: Int!
+ fromRandomCount: Int
+}
+
input ReactionBy {
shout: String # slug
shouts: [String]
@@ -276,6 +281,7 @@ type Query {
loadAuthorsBy(by: AuthorsBy, limit: Int, offset: Int): [Author]!
loadShout(slug: String, shout_id: Int): Shout
loadShouts(options: LoadShoutsOptions): [Shout]!
+ loadRandomTopShouts(params: LoadRandomTopShoutsParams): [Shout]!
loadDrafts: [Shout]!
loadReactionsBy(by: ReactionBy!, limit: Int, offset: Int): [Reaction]!
userFollowers(slug: String!): [Author]!
From f5a3e273a61d98e7bcf4e335257b62e60def4273 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Thu, 14 Dec 2023 19:40:12 +0100
Subject: [PATCH 22/27] unrated shouts query (#110)
Co-authored-by: Igor Lobanov
---
resolvers/zine/load.py | 28 ++++++++++++++++++++++++++++
schema.graphql | 1 +
2 files changed, 29 insertions(+)
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index db1ee0e2..aecda7e1 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -245,6 +245,34 @@ async def load_random_top_shouts(_, info, params):
return get_shouts_from_query(q)
+@query.field("loadUnratedShouts")
+async def load_unrated_shouts(_, info, limit):
+ q = (
+ select(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ )
+ .outerjoin(
+ Reaction,
+ and_(
+ Reaction.shout == Shout.id,
+ Reaction.replyTo.is_(None),
+ Reaction.kind.in_([ReactionKind.LIKE, ReactionKind.DISLIKE]),
+ ),
+ )
+ .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None), Reaction.id.is_(None)))
+ )
+
+ q = add_stat_columns(q)
+
+ q = q.group_by(Shout.id).order_by(desc(Shout.createdAt)).limit(limit)
+
+ # print(q.compile(compile_kwargs={"literal_binds": True}))
+
+ return get_shouts_from_query(q)
+
+
@query.field("loadDrafts")
@login_required
async def get_drafts(_, info):
diff --git a/schema.graphql b/schema.graphql
index 91aecb55..56517191 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -282,6 +282,7 @@ type Query {
loadShout(slug: String, shout_id: Int): Shout
loadShouts(options: LoadShoutsOptions): [Shout]!
loadRandomTopShouts(params: LoadRandomTopShoutsParams): [Shout]!
+ loadUnratedShouts(limit: Int!): [Shout]!
loadDrafts: [Shout]!
loadReactionsBy(by: ReactionBy!, limit: Int, offset: Int): [Reaction]!
userFollowers(slug: String!): [Author]!
From e23e3791020cc4ac9cef958a1c6f98eff81c8739 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Sat, 16 Dec 2023 14:47:58 +0100
Subject: [PATCH 23/27] unrated shouts query update (#111)
Co-authored-by: Igor Lobanov
---
resolvers/zine/load.py | 30 +++++++++++++++++++++++++++---
1 file changed, 27 insertions(+), 3 deletions(-)
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index aecda7e1..bda57f6d 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -2,7 +2,16 @@ import json
from datetime import datetime, timedelta
from sqlalchemy.orm import aliased, joinedload
-from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select
+from sqlalchemy.sql.expression import (
+ and_,
+ asc,
+ case,
+ desc,
+ distinct,
+ func,
+ nulls_last,
+ select,
+)
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
@@ -247,6 +256,8 @@ async def load_random_top_shouts(_, info, params):
@query.field("loadUnratedShouts")
async def load_unrated_shouts(_, info, limit):
+ auth: AuthCredentials = info.context["request"].auth
+
q = (
select(Shout)
.options(
@@ -261,12 +272,25 @@ async def load_unrated_shouts(_, info, limit):
Reaction.kind.in_([ReactionKind.LIKE, ReactionKind.DISLIKE]),
),
)
- .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None), Reaction.id.is_(None)))
+ .where(
+ and_(
+ Shout.deletedAt.is_(None),
+ Shout.layout.is_not(None),
+ Shout.createdAt >= (datetime.now() - timedelta(days=14)).date(),
+ )
+ )
)
+ user_id = auth.user_id
+ if user_id:
+ q = q.where(Reaction.createdBy != user_id)
+
+ # 3 or fewer votes is 0, 1, 2 or 3 votes (null, reaction id1, reaction id2, reaction id3)
+ q = q.having(func.count(distinct(Reaction.id)) <= 4)
+
q = add_stat_columns(q)
- q = q.group_by(Shout.id).order_by(desc(Shout.createdAt)).limit(limit)
+ q = q.group_by(Shout.id).order_by(func.random()).limit(limit)
# print(q.compile(compile_kwargs={"literal_binds": True}))
From ff834987d410c5c15ea2e31f5cc801c5d023e1aa Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Mon, 18 Dec 2023 14:38:45 +0100
Subject: [PATCH 24/27] unrated shouts query fix (#112)
Co-authored-by: Igor Lobanov
---
resolvers/zine/load.py | 31 ++++++++++++++++++++++++-------
1 file changed, 24 insertions(+), 7 deletions(-)
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index bda57f6d..c5f04d6e 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -257,6 +257,9 @@ async def load_random_top_shouts(_, info, params):
@query.field("loadUnratedShouts")
async def load_unrated_shouts(_, info, limit):
auth: AuthCredentials = info.context["request"].auth
+ user_id = auth.user_id
+
+ aliased_reaction = aliased(Reaction)
q = (
select(Shout)
@@ -272,22 +275,36 @@ async def load_unrated_shouts(_, info, limit):
Reaction.kind.in_([ReactionKind.LIKE, ReactionKind.DISLIKE]),
),
)
- .where(
+ )
+
+ if user_id:
+ q = q.outerjoin(
+ aliased_reaction,
and_(
- Shout.deletedAt.is_(None),
- Shout.layout.is_not(None),
- Shout.createdAt >= (datetime.now() - timedelta(days=14)).date(),
- )
+ aliased_reaction.shout == Shout.id,
+ aliased_reaction.replyTo.is_(None),
+ aliased_reaction.kind.in_([ReactionKind.LIKE, ReactionKind.DISLIKE]),
+ aliased_reaction.createdBy == user_id,
+ ),
+ )
+
+ q = q.where(
+ and_(
+ Shout.deletedAt.is_(None),
+ Shout.layout.is_not(None),
+ Shout.createdAt >= (datetime.now() - timedelta(days=14)).date(),
)
)
- user_id = auth.user_id
if user_id:
- q = q.where(Reaction.createdBy != user_id)
+ q = q.where(Shout.createdBy != user_id)
# 3 or fewer votes is 0, 1, 2 or 3 votes (null, reaction id1, reaction id2, reaction id3)
q = q.having(func.count(distinct(Reaction.id)) <= 4)
+ if user_id:
+ q = q.having(func.count(distinct(aliased_reaction.id)) == 0)
+
q = add_stat_columns(q)
q = q.group_by(Shout.id).order_by(func.random()).limit(limit)
From f395832d32b4e0a9ce241ff3776ca18576e557b3 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Thu, 21 Dec 2023 00:53:53 +0100
Subject: [PATCH 25/27] random topic shouts query, published date filter in
random tops (#113)
Co-authored-by: Igor Lobanov
---
resolvers/zine/load.py | 41 +++++++++++++++++++++++++++++++++++-----
resolvers/zine/topics.py | 19 ++++++++++++++++---
schema.graphql | 6 ++++++
3 files changed, 58 insertions(+), 8 deletions(-)
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index c5f04d6e..2cbe20d6 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -22,6 +22,7 @@ from orm import TopicFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.user import AuthorFollower
+from resolvers.zine.topics import get_random_topic
def get_shouts_from_query(q):
@@ -79,7 +80,8 @@ def add_stat_columns(q):
return q
-def apply_filters(q, filters, user_id=None): # noqa: C901
+# use_published_date is a quick fix, will be reworked as a part of tech debt
+def apply_filters(q, filters, user_id=None, use_published_date=False): # noqa: C901
if filters.get("reacted") and user_id:
q.join(Reaction, Reaction.createdBy == user_id)
@@ -100,12 +102,17 @@ def apply_filters(q, filters, user_id=None): # noqa: C901
if filters.get("fromDate"):
# fromDate: '2022-12-31
date_from = datetime.strptime(filters.get("fromDate"), "%Y-%m-%d")
- q = q.filter(Shout.createdAt >= date_from)
+ if use_published_date:
+ q = q.filter(Shout.publishedAt >= date_from)
+ else:
+ q = q.filter(Shout.createdAt >= date_from)
if filters.get("toDate"):
# toDate: '2023-12-31'
date_to = datetime.strptime(filters.get("toDate"), "%Y-%m-%d")
- q = q.filter(Shout.createdAt < (date_to + timedelta(days=1)))
-
+ if use_published_date:
+ q = q.filter(Shout.publishedAt < (date_to + timedelta(days=1)))
+ else:
+ q = q.filter(Shout.createdAt < (date_to + timedelta(days=1)))
return q
@@ -228,7 +235,8 @@ async def load_random_top_shouts(_, info, params):
.where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None)))
)
- subquery = apply_filters(subquery, params.get("filters", {}))
+ subquery = apply_filters(subquery, params.get("filters", {}), use_published_date=True)
+
subquery = subquery.group_by(Shout.id).order_by(desc(get_rating_func(aliased_reaction)))
from_random_count = params.get("fromRandomCount")
@@ -254,6 +262,29 @@ async def load_random_top_shouts(_, info, params):
return get_shouts_from_query(q)
+@query.field("loadRandomTopicShouts")
+async def load_random_topic_shouts(_, info, limit):
+ topic = get_random_topic()
+
+ q = (
+ select(Shout)
+ .options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ )
+ .join(ShoutTopic, and_(Shout.id == ShoutTopic.shout, ShoutTopic.topic == topic.id))
+ .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None)))
+ )
+
+ q = add_stat_columns(q)
+
+ q = q.group_by(Shout.id).order_by(desc(Shout.createdAt)).limit(limit)
+
+ shouts = get_shouts_from_query(q)
+
+ return {"topic": topic, "shouts": shouts}
+
+
@query.field("loadUnratedShouts")
async def load_unrated_shouts(_, info, limit):
auth: AuthCredentials = info.context["request"].auth
diff --git a/resolvers/zine/topics.py b/resolvers/zine/topics.py
index ad4f59fc..c9c9aae0 100644
--- a/resolvers/zine/topics.py
+++ b/resolvers/zine/topics.py
@@ -12,11 +12,12 @@ from orm.topic import Topic, TopicFollower
def add_topic_stat_columns(q):
aliased_shout_author = aliased(ShoutAuthor)
aliased_topic_follower = aliased(TopicFollower)
+ aliased_shout_topic = aliased(ShoutTopic)
q = (
- q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic)
- .add_columns(func.count(distinct(ShoutTopic.shout)).label("shouts_stat"))
- .outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout)
+ q.outerjoin(aliased_shout_topic, Topic.id == aliased_shout_topic.topic)
+ .add_columns(func.count(distinct(aliased_shout_topic.shout)).label("shouts_stat"))
+ .outerjoin(aliased_shout_author, aliased_shout_topic.shout == aliased_shout_author.shout)
.add_columns(func.count(distinct(aliased_shout_author.user)).label("authors_stat"))
.outerjoin(aliased_topic_follower)
.add_columns(func.count(distinct(aliased_topic_follower.follower)).label("followers_stat"))
@@ -146,6 +147,18 @@ def topic_unfollow(user_id, slug):
return False
+def get_random_topic():
+ q = select(Topic)
+ q = q.join(ShoutTopic)
+ q = q.group_by(Topic.id)
+ q = q.having(func.count(distinct(ShoutTopic.shout)) > 10)
+ q = q.order_by(func.random()).limit(1)
+
+ with local_session() as session:
+ [topic] = session.execute(q).first()
+ return topic
+
+
@query.field("topicsRandom")
async def topics_random(_, info, amount=12):
q = select(Topic)
diff --git a/schema.graphql b/schema.graphql
index 56517191..92560991 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -264,6 +264,11 @@ type MySubscriptionsQueryResult {
authors: [Author]!
}
+type RandomTopicShoutsQueryResult {
+ topic: Topic!
+ shouts: [Shout]!
+}
+
type Query {
# inbox
loadChats( limit: Int, offset: Int): Result! # your chats
@@ -282,6 +287,7 @@ type Query {
loadShout(slug: String, shout_id: Int): Shout
loadShouts(options: LoadShoutsOptions): [Shout]!
loadRandomTopShouts(params: LoadRandomTopShoutsParams): [Shout]!
+ loadRandomTopicShouts(limit: Int!): RandomTopicShoutsQueryResult!
loadUnratedShouts(limit: Int!): [Shout]!
loadDrafts: [Shout]!
loadReactionsBy(by: ReactionBy!, limit: Int, offset: Int): [Reaction]!
From 67576d0a5bb9760eb2e20a4476fa0bbe20be8620 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Thu, 21 Dec 2023 11:49:28 +0100
Subject: [PATCH 26/27] only published in random topic shouts (#114)
---
resolvers/zine/load.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 2cbe20d6..58b4ad68 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -273,7 +273,9 @@ async def load_random_topic_shouts(_, info, limit):
joinedload(Shout.topics),
)
.join(ShoutTopic, and_(Shout.id == ShoutTopic.shout, ShoutTopic.topic == topic.id))
- .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None)))
+ .where(
+ and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None), Shout.visibility == "public")
+ )
)
q = add_stat_columns(q)
From c3a482614ea47ec174069d8a88c36a97c9f2a2b5 Mon Sep 17 00:00:00 2001
From: Igor Lobanov
Date: Mon, 8 Jan 2024 09:20:29 +0100
Subject: [PATCH 27/27] robo migrate script
---
resolvers/zine/load.py | 2 +-
robo_migrate_a2.sh | 220 +++++++++++++++++++++++++++++++++++++++++
2 files changed, 221 insertions(+), 1 deletion(-)
create mode 100644 robo_migrate_a2.sh
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 58b4ad68..8358ae5e 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -87,7 +87,7 @@ def apply_filters(q, filters, user_id=None, use_published_date=False): # noqa:
v = filters.get("visibility")
if v == "public":
- q = q.filter(Shout.visibility == filters.get("visibility"))
+ q = q.filter(Shout.visibility == "public")
if v == "community":
q = q.filter(Shout.visibility.in_(["public", "community"]))
diff --git a/robo_migrate_a2.sh b/robo_migrate_a2.sh
new file mode 100644
index 00000000..7aa5fe6e
--- /dev/null
+++ b/robo_migrate_a2.sh
@@ -0,0 +1,220 @@
+#!/bin/bash
+# This version is a2.1 because have update in postgres dsn to ip adress
+
+export PATH="$PATH:/usr/local/sbin:/usr/sbin:/sbin"
+
+APP="discoursio-api"
+SSH_KEY="/root/.ssh/id_rsa"
+YMD=$(date "+%Y-%m-%d")
+DUMP_PATH="/var/lib/dokku/data/storage/discoursio-api/migration/dump"
+DATA_PATH="/var/lib/dokku/data/storage/discoursio-api/migration/data"
+SCRIPT_PATH="/root/robo_script"
+MONGO_DB_PATH="/var/backups/mongodb"
+POSTGRES_DB_PATH="/var/backups/postgres"
+CONTAINER_ID=$(docker ps | grep "$APP" | /bin/awk '{print $1}')
+OLD_DB=$(dokku postgres:app-links "$APP")
+NEW_DB="discoursio-db-$YMD"
+DSN_OLD_DB=$(dokku config:get "$APP" DATABASE_URL)
+LAST_DB_MONGO=$(find "$MONGO_DB_PATH" -printf '%T@ %p\n' | sort -nk1 | grep discours | tail -n 1 | /bin/awk '{print $2}')
+LAST_DB_POSTGRES=$(find "$POSTGRES_DB_PATH" -printf '%T@ %p\n' | sort -nk1 | grep discours | tail -n 1 | /bin/awk '{print $2}')
+NEW_HOST="testapi.discours.io"
+NEW_PATH="/root/."
+
increase_swap() {
  # Temporarily grow swap to 6 GB so the migration survives memory
  # spikes. Re-creates /swap_file from scratch; decrease_swap restores
  # the regular 2 GB file at the end of the run.
  echo "Make Swap 6GB"
  swapoff -a
  dd if=/dev/zero of=/swap_file bs=1M count=6144
  chmod 600 /swap_file
  mkswap /swap_file
  swapon /swap_file
}
+
check_container() {
  # Abort the run unless the script-level docker-ps scan found the
  # application container.
  if [ -n "$CONTAINER_ID" ]; then
    echo "Container $APP is running"
  else
    echo "Container $APP is not Running"
    exit 1
  fi
}
+
check_dump_dir() {
  # Verify that both migration bind-mount directories exist on the host
  # before any work starts. Paths are quoted (they were not) so a value
  # with spaces — or an empty one — cannot break the [ -d ] test, and
  # the "dosn't" typo in the user-facing message is fixed.
  if [ ! -d "$DUMP_PATH" ]; then
    echo "$DUMP_PATH doesn't exist"
    exit 1
  else
    echo "$DUMP_PATH exist (^.-)"
  fi
  if [ ! -d "$DATA_PATH" ]; then
    echo "$DATA_PATH doesn't exist"
    exit 1
  else
    echo "$DATA_PATH exist (-.^)"
  fi
}
+
check_old_db() {
  # The name of the currently linked postgres service must be known
  # before migration starts (it is unlinked and destroyed later).
  [ -n "$OLD_DB" ] || { echo "DB postgres is not set"; exit 1; }
  echo "DB postgres is set"
}
+
check_app_config() {
  # Both storage mounts must appear in the app's docker-options report,
  # otherwise the container cannot see the dump/data directories.
  # Run the pipelines directly: the original `if $(cmd | grep -q …)`
  # executed grep's (empty) output instead of testing the pipeline's
  # exit status (ShellCheck SC2091). Variables are quoted as well.
  if dokku docker-options:report "$APP" | grep -q "$DUMP_PATH" \
    && dokku docker-options:report "$APP" | grep -q "$DATA_PATH"; then
    echo "DUMP_PATH and DATA_PATH exist in $APP config"
  else
    echo "DUMP_PATH or DATA_PATH does not exist in $APP config"
    exit 1
  fi
}
+
+
untar_mongo_db() {
  # Extract the newest mongo backup archive into the dump mount.
  # Idempotent: skips when a previous run already produced
  # $DUMP_PATH/discours.
  if [ -d "$DUMP_PATH/discours" ]; then
    echo "$DUMP_PATH/discours File exists"
  else
    # The archive unpacks as <name>.bson/discours in the current
    # directory; keep only the discours payload and drop the wrappers.
    # Paths are now quoted and `--` added so spaces, empty values or
    # leading-dash names cannot break or redirect the commands.
    tar xzf "$LAST_DB_MONGO" && mv -- *.bson/discours "$DUMP_PATH"/ && rm -R -- *.bson
  fi
  echo "Untar Bson from mongoDB"
}
+
+bson_mode() {
+ CONTAINER_ID=$(docker ps | grep "$APP" | /bin/awk '{print $1}')
+
+ if [ -z "$CONTAINER_ID" ]; then
+ echo "Container $APP is not Running"
+ exit 1
+ fi
+
+ docker exec -t "$CONTAINER_ID" rm -rf dump
+ docker exec -t "$CONTAINER_ID" ln -s /migration/dump dump
+
+ docker exec -t "$CONTAINER_ID" rm -rf migration/data
+ docker exec -t "$CONTAINER_ID" ln -s /migration/data migration/data
+
+ docker exec -t "$CONTAINER_ID" python3 server.py bson
+}
+
create_new_postgres_db() {
  # Create a fresh dokku postgres service and expose its DSN to the app
  # as MIGRATION_DATABASE_URL.
  echo "Create NEW postgres DB"
  dokku postgres:create "$NEW_DB"

  # Get the internal IP address of the new service's container.
  INTERNAL_IP=$(dokku postgres:info "$NEW_DB" | grep 'Internal ip:' | awk '{print $3}')

  # Get the DSN; sed rewrites the first "postgres" occurrence, i.e. the
  # URL scheme, to "postgresql" (required by SQLAlchemy-style clients).
  DSN=$(dokku postgres:info "$NEW_DB" --dsn | sed 's/postgres/postgresql/')

  # Replace the docker hostname with the internal IP address — this is
  # the "a2.1" change noted at the top of the file.
  DSN_NEW_DB=$(echo "$DSN" | sed "s@dokku-postgres-$NEW_DB@$INTERNAL_IP@")

  echo "$DSN_NEW_DB"
  dokku postgres:link "$NEW_DB" "$APP" -a "MIGRATION_DATABASE"
  dokku config:set "$APP" MIGRATION_DATABASE_URL="$DSN_NEW_DB" --no-restart

  # Wait a fixed 120 seconds for the new service to come up, printing a
  # heartbeat every second. NOTE(review): a readiness poll would be
  # more robust than a fixed sleep — confirm before changing behavior.
  echo "Waiting for 120 seconds..."
  for i in {1..120}; do
    sleep 1
    echo -n "(^.^') "
  done
}
+
+migrate_jsons() {
+
+CONTAINER_ID=$(docker ps | grep $APP | /bin/awk '{print $1}')
+
+ if [ -z "$CONTAINER_ID" ]; then
+ echo "Container $APP is not Running"
+ exit 1
+ fi
+
+docker exec -t "$CONTAINER_ID" rm -rf dump
+docker exec -t "$CONTAINER_ID" ln -s /migration/dump dump
+
+docker exec -t "$CONTAINER_ID" rm -rf migration/data
+docker exec -t "$CONTAINER_ID" ln -s /migration/data migration/data
+
+docker exec -t --env DATABASE_URL="$DSN_NEW_DB" "$CONTAINER_ID" python3 server.py migrate
+}
+
restart_and_clean() {
  # Switch the app over to the new database: stop it, rewrite
  # DATABASE_URL to the new DSN, drop the link to the old service, then
  # start again. Order matters — config must change while stopped.
  dokku ps:stop "$APP"
  dokku config:unset "$APP" MIGRATION_DATABASE_URL --no-restart
  dokku config:unset "$APP" DATABASE_URL --no-restart
  dokku config:set "$APP" DATABASE_URL="$DSN_NEW_DB" --no-restart
  dokku postgres:unlink "$OLD_DB" "$APP"
  dokku ps:start "$APP"
}
+
send_postgres_dump() {
  # Ship the newest postgres dump to the standby host for safekeeping.
  printf 'send postgres.dump to %s\n' "$NEW_HOST"
  scp -i "$SSH_KEY" -r "$LAST_DB_POSTGRES" "root@$NEW_HOST:$NEW_PATH"
}
+
delete_files() {
  # Purge extracted dump/data artifacts between runs.
  # ${VAR:?} aborts if the variable is empty or unset, so a
  # misconfigured path can never expand to a bare `rm -rf /*`;
  # `--` stops option parsing for any odd glob results.
  rm -rf -- "${DUMP_PATH:?}"/*
  rm -rf -- "${DATA_PATH:?}"/*
}
+
configure_pgweb() {
  # Repoint the pgweb admin UI from the old database to the new one.
  # NOTE(review): this also destroys "$OLD_DB", duplicating rm_old_db;
  # the call site in the main flow is currently commented out — confirm
  # which of the two should own the destroy before re-enabling.
  echo "config PGWEB"
  dokku ps:stop pgweb
  dokku config:unset pgweb DATABASE_URL --no-restart
  dokku postgres:unlink "$OLD_DB" pgweb
  dokku postgres:link "$NEW_DB" pgweb -a "DATABASE"
  dokku postgres:destroy "$OLD_DB" -f
  dokku ps:start pgweb
}
+
rm_old_db() {
  # Permanently drop the superseded postgres service.
  printf '%s\n' "remove old DB"
  dokku postgres:destroy "$OLD_DB" -f
}
+
decrease_swap() {
  # Shrink the temporary 6 GB swap file back to the regular 2 GB once
  # the migration (or an error path) is done.
  echo "make swap 2gb again"
  swapoff -a
  dd if=/dev/zero of=/swap_file bs=1M count=2048
  chmod 600 /swap_file
  mkswap /swap_file
  swapon /swap_file
}
+
# Main script flow: preflight checks, then the two migration stages,
# then cleanup. Each stage failure rolls back swap/files and exits.
increase_swap
check_container
check_dump_dir
check_old_db
check_app_config
untar_mongo_db

# Stage 1: convert the mongo BSON dump inside the container.
if ! bson_mode; then
  echo "BSON move didn't work well! ERROR!"

  decrease_swap
  delete_files

  exit 1
fi
create_new_postgres_db

# Stage 2: load the intermediate JSON into the new postgres service.
if ! migrate_jsons; then
  echo "MIGRATE move didn't work well! ERROR!"

  delete_files
  rm_old_db
  decrease_swap

  exit 1
fi
restart_and_clean

send_postgres_dump
delete_files
#configure_pgweb
rm_old_db
decrease_swap