Mirror of https://github.com/paul-nameless/tg, synced 2024-11-22 03:43:19 +00:00

Merge pull request #57 from paul-nameless/reply-msg: Add msg reply support

Commit 3baba619a5: 4 changed files with 69 additions and 2 deletions
@@ -7,7 +7,7 @@ DEFAULT_CONFIG = os.path.expanduser("~/.config/tg/tg.conf")
 DEFAULT_FILES = os.path.expanduser("~/.cache/tg/")
 max_download_size = "10MB"
 record_cmd = "ffmpeg -f avfoundation -i ':0' -ar 22050 -b:a 32k '{file_path}'"
-long_msg_cmd = "vim -c 'startinsert' {file_path}"
+long_msg_cmd = "vim + -c 'startinsert' {file_path}"
 editor = os.environ.get("EDITOR", "vi")
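
Note (not part of the diff): the only change in this hunk is the extra "+" argument to vim in long_msg_cmd; "vim + file" opens the file with the cursor on its last line, which is where reply_with_long_message (further down) leaves a blank line for the user to type on. A minimal sketch of how the template is expected to be expanded, using a made-up temporary file path:

    # Illustration only: the path is a placeholder; the controller passes the
    # NamedTemporaryFile's real name via str.format, as shown in the
    # controllers hunk below.
    long_msg_cmd = "vim + -c 'startinsert' {file_path}"
    cmd = long_msg_cmd.format(file_path="/tmp/tmpXXXXXXXX.txt")
    # cmd == "vim + -c 'startinsert' /tmp/tmpXXXXXXXX.txt"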
@@ -26,7 +26,7 @@ from tg.views import View
 log = logging.getLogger(__name__)
 
 MSGS_LEFT_SCROLL_THRESHOLD = 10
 
+REPLY_MSG_PREFIX = "# >"
+
 # start scrolling to next page when number of the msgs left is less than value.
 # note, that setting high values could lead to situations when long msgs will
@@ -96,6 +96,8 @@ class Controller:
            "a": lambda _: self.write_short_msg(),
            "I": lambda _: self.write_long_msg(),
            "A": lambda _: self.write_long_msg(),
+           "r": lambda _: self.reply_message(),
+           "R": lambda _: self.reply_with_long_message(),
            "bp": lambda _: self.breakpoint(),
            " ": lambda _: self.toggle_select_msg(),
            "^[": lambda _: self.discard_selected_msgs(),  # esc
@@ -213,6 +215,29 @@ class Controller:
         with suspend(self.view):
             breakpoint()
 
+    def reply_message(self):
+        # write new message
+        if msg := self.view.status.get_input():
+            self.model.reply_message(text=msg)
+            self.present_info("Message reply sent")
+        else:
+            self.present_info("Message reply wasn't sent")
+
+    def reply_with_long_message(self):
+        msg = MsgProxy(self.model.current_msg)
+        with NamedTemporaryFile("w+", suffix=".txt") as f, suspend(
+            self.view
+        ) as s:
+            f.write(insert_replied_msg(msg))
+            f.seek(0)
+            s.call(config.long_msg_cmd.format(file_path=f.name))
+            with open(f.name) as f:
+                if msg := strip_replied_msg(f.read().strip()):
+                    self.model.reply_message(text=msg)
+                    self.present_info("Message sent")
+                else:
+                    self.present_info("Message wasn't sent")
+
     def write_short_msg(self):
         # write new message
         if msg := self.view.status.get_input():
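
For illustration (not part of the diff): assuming the replied-to message text is "hello\nworld", the temporary file handed to the editor contains each original line quoted with REPLY_MSG_PREFIX plus a trailing line holding a single space, which is where the cursor lands thanks to the "vim +" change above.

    # Sketch only: expected editor buffer for a reply to a two-line message.
    expected_buffer = "# > hello\n# > world\n "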
@@ -450,3 +475,22 @@ class Controller:
             self.model.current_chat = i
             break
         self.render()
+
+
+def insert_replied_msg(msg: MsgProxy) -> str:
+    text = msg.text_content if msg.is_text else msg.content_type
+    return (
+        "\n".join([f"{REPLY_MSG_PREFIX} {line}" for line in text.split("\n")])
+        # adding line with whitespace so text editor could start editing from last line
+        + "\n "
+    )
+
+
+def strip_replied_msg(msg: str) -> str:
+    return "\n".join(
+        [
+            line
+            for line in msg.split("\n")
+            if not line.startswith(REPLY_MSG_PREFIX)
+        ]
+    )
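
A hedged round-trip sketch for the two helpers above (the message text and the typed reply are made up): the quoted lines exist only as context in the editor, and strip_replied_msg drops them so that just the user's reply is sent.

    # Illustration only: the user replaced the trailing blank line with a reply.
    edited = "# > hello\n# > world\nsounds good"
    strip_replied_msg(edited)  # -> "sounds good"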
@@ -120,6 +120,14 @@ class Model:
         limit = offset + page_size
         return self.chats.fetch_chats(offset=offset, limit=limit)
 
+    def reply_message(self, text: str) -> bool:
+        chat_id = self.chats.id_by_index(self.current_chat)
+        if chat_id is None:
+            return False
+        reply_to_msg = self.current_msg_id
+        self.tg.reply_message(chat_id, reply_to_msg, text)
+        return True
+
     def send_message(self, text: str) -> bool:
         chat_id = self.chats.id_by_index(self.current_chat)
         if chat_id is None:
tg/tdlib.py (15 lines changed)
@@ -20,6 +20,21 @@ class Tdlib(Telegram):
         )
         result.wait()
 
+    def reply_message(
+        self, chat_id: int, reply_to_message_id: int, text: str
+    ) -> AsyncResult:
+        data = {
+            "@type": "sendMessage",
+            "chat_id": chat_id,
+            "reply_to_message_id": reply_to_message_id,
+            "input_message_content": {
+                "@type": "inputMessageText",
+                "text": {"@type": "formattedText", "text": text},
+            },
+        }
+
+        return self._send_data(data)
+
     def send_doc(self, file_path: str, chat_id: int) -> AsyncResult:
         data = {
             "@type": "sendMessage",
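
A hedged usage sketch, assuming tg is a Tdlib instance and using made-up ids: reply_message returns an AsyncResult, so a caller that needs confirmation can wait on it, mirroring the result.wait() call visible in the context above.

    # Illustration only: chat_id and reply_to_message_id are placeholder values.
    result = tg.reply_message(
        chat_id=-1001234567890, reply_to_message_id=4242, text="sounds good"
    )
    result.wait()  # block until tdlib has processed the sendMessage request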