commit 3cb092abb9
@@ -0,0 +1,251 @@

# Created by https://www.toptal.com/developers/gitignore/api/pycharm,python
# Edit at https://www.toptal.com/developers/gitignore?templates=pycharm,python

### PyCharm ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

### PyCharm Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721

# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr

# Sonarlint plugin
# https://plugins.jetbrains.com/plugin/7973-sonarlint
.idea/**/sonarlint/

# SonarQube Plugin
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
.idea/**/sonarIssues.xml

# Markdown Navigator plugin
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
.idea/**/markdown-navigator.xml
.idea/**/markdown-navigator-enh.xml
.idea/**/markdown-navigator/

# Cache file creation bug
# See https://youtrack.jetbrains.com/issue/JBR-2257
.idea/$CACHE_FILE$

# CodeStream plugin
# https://plugins.jetbrains.com/plugin/12206-codestream
.idea/codestream.xml

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
pytestdebug.log

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/
doc/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
pythonenv*

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# profiling data
.prof

# End of https://www.toptal.com/developers/gitignore/api/pycharm,python

config.json
assets/
@@ -0,0 +1,23 @@
import json
import logging

import discord
from d4dj_utils.manager.asset_manager import AssetManager
from discord.ext import commands

logging.basicConfig(level=logging.INFO)

with open('config.json') as f:
    bot_token = json.load(f)['token']
bot = commands.Bot(command_prefix='!', case_insensitive=True)
asset_manager = AssetManager('assets')
bot.load_extension('miyu_bot.commands.cogs.chart')


@bot.event
async def on_ready():
    logging.getLogger(__name__).info(f'Current server count: {len(bot.guilds)}')
    await bot.change_presence(activity=discord.Game(name='test'))


bot.run(bot_token)
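
The bot token is read from config.json, which the .gitignore above keeps out of version control (along with assets/). The file's exact schema is not part of this commit; based on json.load(f)['token'], a minimal sketch of producing it would look like the following, where the helper name and placeholder value are illustrative, not from the repository:

    # make_config.py (hypothetical helper, not part of this commit)
    import json

    # main.py only requires a 'token' key; anything beyond that is an assumption.
    with open('config.json', 'w') as f:
        json.dump({'token': 'YOUR_DISCORD_BOT_TOKEN'}, f, indent=4)
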
@@ -0,0 +1,106 @@
import logging

import discord
from d4dj_utils.master.chart_master import ChartDifficulty, ChartMaster
from d4dj_utils.master.common_enums import ChartSectionType
from d4dj_utils.master.music_master import MusicMaster
from discord.ext import commands

from main import asset_manager
from miyu_bot.commands.common.fuzzy_matching import romanize, FuzzyMatcher


class Charts(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.logger = logging.getLogger(__name__)
        self.music = self.get_music()

    def get_music(self):
        # Index released songs by name (and special unit name) for fuzzy lookup.
        music = FuzzyMatcher(lambda m: m.is_released)
        for m in asset_manager.music_master.values():
            music[f'{m.name} {m.special_unit_name}'] = m
        return music

    difficulty_names = {
        'expert': ChartDifficulty.Expert,
        'hard': ChartDifficulty.Hard,
        'normal': ChartDifficulty.Normal,
        'easy': ChartDifficulty.Easy,
        'exp': ChartDifficulty.Expert,
        'hrd': ChartDifficulty.Hard,
        'nrm': ChartDifficulty.Normal,
        'esy': ChartDifficulty.Easy,
        'ex': ChartDifficulty.Expert,
        'hd': ChartDifficulty.Hard,
        'nm': ChartDifficulty.Normal,
        'es': ChartDifficulty.Easy,
    }

    @commands.command()
    async def chart(self, ctx, *, arg):
        self.logger.info(f'Searching for chart "{arg}".')

        arg = arg.strip()

        if not arg:
            await ctx.send('Argument is empty.')
            return

        split_args = arg.split()

        # An optional trailing word selects the difficulty; Expert is the default.
        difficulty = ChartDifficulty.Expert
        if len(split_args) >= 2:
            final_word = split_args[-1]
            if final_word in self.difficulty_names:
                difficulty = self.difficulty_names[final_word]
                arg = ''.join(split_args[:-1])

        song: MusicMaster = self.music[arg]
        if not song:
            msg = f'Failed to find chart "{arg}".'
            await ctx.send(msg)
            self.logger.info(msg)
            return
        self.logger.info(f'Found "{song}" ({romanize(song.name)[1]}).')

        chart: ChartMaster = song.charts[difficulty]

        chart_data = chart.load_chart_data()
        note_counts = chart_data.get_note_counts()

        thumb = discord.File(song.jacket_path, filename='jacket.png')
        render = discord.File(chart.image_path, filename='render.png')

        embed = discord.Embed(title=song.name)
        embed.set_thumbnail(url='attachment://jacket.png')
        embed.set_image(url='attachment://render.png')

        embed.add_field(name='Info',
                        value=f'Difficulty: {chart.display_level} ({chart.difficulty.name})\n'
                              f'Unit: {song.special_unit_name or song.unit.name}\n'
                              f'Category: {song.category.name}\n'
                              f'BPM: {song.bpm}',
                        inline=False)
        embed.add_field(name='Combo',
                        value=f'Max Combo: {chart.note_counts[ChartSectionType.Full].count}\n'
                              f'Taps: {note_counts["tap"]} (dark: {note_counts["tap1"]}, light: {note_counts["tap2"]})\n'
                              f'Scratches: {note_counts["scratch"]} (left: {note_counts["scratch_left"]}, right: {note_counts["scratch_right"]})\n'
                              f'Stops: {note_counts["stop"]} (head: {note_counts["stop_start"]}, tail: {note_counts["stop_end"]})\n'
                              f'Long: {note_counts["long"]} (head: {note_counts["long_start"]}, tail: {note_counts["long_end"]})\n'
                              f'Slide: {note_counts["slide"]} (tick: {note_counts["slide_tick"]}, flick: {note_counts["slide_flick"]})',
                        inline=True)
        embed.add_field(name='Ratings',
                        value=f'NTS: {round(chart.trends[0] * 100, 2)}%\n'
                              f'DNG: {round(chart.trends[1] * 100, 2)}%\n'
                              f'SCR: {round(chart.trends[2] * 100, 2)}%\n'
                              f'EFT: {round(chart.trends[3] * 100, 2)}%\n'
                              f'TEC: {round(chart.trends[4] * 100, 2)}%\n',
                        inline=True)

        await ctx.send(files=[thumb, render], embed=embed)


def setup(bot):
    bot.add_cog(Charts(bot))
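
Going by the command definition above, the cog is used as !chart <song name> [difficulty]: the optional final word picks the difficulty (expert/hard/normal/easy, or the exp/hrd/nrm/esy and ex/hd/nm/es abbreviations) and defaults to Expert, while the rest of the argument is fuzzy-matched against released songs. An illustrative invocation (the song title is a placeholder, not from this commit):

    !chart some song title hard
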
@@ -0,0 +1,76 @@
import logging
import re
from typing import Tuple

import pykakasi


class FuzzyMatcher:
    def __init__(self, filter, threshold: float = 1):
        self.filter = filter or (lambda n: True)
        self.threshold = threshold
        self.values = {}
        self.max_length = 0
        self.logger = logging.getLogger(__name__)

    def __setitem__(self, key, value):
        k = romanize(key)
        self.values[k] = value
        # Track the longest romanized key so overly long queries can be rejected early.
        self.max_length = max(self.max_length, len(k[0]))

    def __getitem__(self, key):
        if len(key) > self.max_length * 1.1:
            self.logger.debug(f'Rejected key "{key}" due to length.')
            return None
        key, _ = romanize(key)
        # default=None avoids a ValueError when no stored value passes the filter.
        result = min((k for k, v in self.values.items() if self.filter(v)),
                     key=lambda v: fuzzy_match_score(key, *v, threshold=self.threshold),
                     default=None)
        if result is None or fuzzy_match_score(key, *result, threshold=self.threshold) > self.threshold:
            return None
        return self.values[result]


_insertion_weight = 0.001
_deletion_weight = 1
_substitution_weight = 1


def fuzzy_match_score(source: str, target: str, words, threshold: float) -> float:
    # Weighted edit distance (cheap insertions, costly deletions/substitutions),
    # reduced by a bonus for matching word prefixes and consonant skeletons.
    m = len(source)
    n = len(target)
    a = [[0] * (n + 1) for _ in range(m + 1)]

    for i in range(m + 1):
        a[i][0] = i

    for i in range(n + 1):
        a[0][i] = i * _insertion_weight

    def strip_vowels(s):
        return re.sub('[aeoiu]', '', s)

    word_match_bonus = 0.1 * max(max(sum(a == b for a, b in zip(source, w)) for w in words),
                                 max(sum(a == b for a, b in
                                         zip(source[0] + strip_vowels(source[1:]), w[0] + strip_vowels(w[1:]))) for w in
                                     words),
                                 sum(a == b for a, b in zip(source, ''.join(w[0] for w in words))))

    for i in range(1, m + 1):
        for j in range(1, n + 1):
            a[i][j] = min(a[i - 1][j - 1] + _substitution_weight if source[i - 1] != target[j - 1] else a[i - 1][j - 1],
                          a[i - 1][j] + _deletion_weight,
                          a[i][j - 1] + _insertion_weight)
            if j == n and (a[i][j] - (m - i) * _insertion_weight - word_match_bonus) > threshold:
                return 9999

    return a[m][n] - word_match_bonus


def romanize(s: str) -> Tuple[str, Tuple[str, ...]]:
    kks = pykakasi.kakasi()
    s = re.sub('[\']', '', s)
    s = re.sub('[A-Za-z]+', lambda ele: f' {ele[0]} ', s)
    s = ' '.join(c['hepburn'].strip().lower() for c in kks.convert(s))
    s = re.sub(r'[^a-zA-Z0-9_ ]+', '', s)
    words = tuple(s.split())
    return ''.join(words), words
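
A rough usage sketch of FuzzyMatcher, mirroring how Charts.get_music applies it; the key and value below are illustrative, and the romanized forms are approximate since they depend on pykakasi's conversion:

    from miyu_bot.commands.common.fuzzy_matching import FuzzyMatcher, romanize

    matcher = FuzzyMatcher(None)               # no filter: every stored value is a candidate
    matcher['Dig Delight!'] = 'example-value'  # keys are romanized on insertion
    # romanize('Dig Delight!') yields roughly ('digdelight', ('dig', 'delight'))
    print(matcher['dig delite'])               # a small misspelling still scores under the threshold
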
@@ -0,0 +1,19 @@
import asyncio
import logging
import logging.config

from d4dj_utils.manager.asset_manager import AssetManager
from d4dj_utils.manager.revision_manager import RevisionManager


async def main():
    logging.basicConfig(level=logging.INFO)
    revision_manager = RevisionManager('assets')
    await revision_manager.repair_downloads()
    await revision_manager.update_assets()
    manager = AssetManager('assets')
    manager.render_charts()


if __name__ == '__main__':
    asyncio.run(main())