Compare commits
9 commits: 10bc3e132e ... d57b5573c9
| Author | SHA1 | Date |
|---|---|---|
|  | d57b5573c9 |  |
|  | ba66b98200 |  |
|  | e51bec4909 |  |
|  | dbfb8db6d4 |  |
|  | 14a646e680 |  |
|  | 6e5c28c9c4 |  |
|  | 97d1b1ebe5 |  |
|  | ec1905d37f |  |
|  | d7c908a033 |  |
13 changed files with 994 additions and 0 deletions
**.gitignore** (vendored, new file, 179 lines added)

```gitignore
# Created by https://www.gitignore.io/api/git,emacs,python,coffeescript
# Edit at https://www.gitignore.io/?templates=git,emacs,python,coffeescript

### CoffeeScript ###
*.js

### Emacs ###
# -*- mode: gitignore; -*-
*~
\#*\#
/.emacs.desktop
/.emacs.desktop.lock
*.elc
auto-save-list
tramp
.\#*

# Org-mode
.org-id-locations
*_archive

# flymake-mode
*_flymake.*

# eshell files
/eshell/history
/eshell/lastdir

# elpa packages
/elpa/

# reftex files
*.rel

# AUCTeX auto folder
/auto/

# cask packages
.cask/
dist/

# Flycheck
flycheck_*.el

# server auth directory
/server/

# projectiles files
.projectile

# directory configuration
.dir-locals.el

# network security
/network-security.data


### Git ###
# Created by git for backups. To disable backups in Git:
# $ git config --global mergetool.keepBackup false
*.orig

# Created by git when using merge tools for conflicts
*.BACKUP.*
*.BASE.*
*.LOCAL.*
*.REMOTE.*
*_BACKUP_*.txt
*_BASE_*.txt
*_LOCAL_*.txt
*_REMOTE_*.txt

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# Mr Developer
.mr.developer.cfg
.project
.pydevproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

#project settings
sahli-venv/*
*.wp*
```
**editor.py** (new file, 160 lines added)

```python
#!/usr/bin/env python
# coding:utf-8
"""
Author: Sir Garbagetruck --<truck@notonfire.somewhere>
Purpose: make editing the list.sahli file easier
Created: 2020/04/09
"""

import json
import argparse
import os
from sauce import SAUCE
from PIL import Image
from sahliEditorPython import sahlifile as SF


def getfilesindir(directory):
    """return the files in a directory as an array"""
    for root, dirs, files, rootfd in os.fwalk(directory):
        return files


def getfilenames(filedata):
    """return the file names from a sahli filedata array"""
    f = []
    for i in filedata:
        f.append(i['file'])
    return f


def getdata(filedata, name):
    """get the filedata entry where file = name"""
    for i in filedata:
        if i['file'] == name:
            return i
    return []


def getpicdata(filename):
    """extract picture data from filename"""
    imagedata = Image.open(filename)
    picdata = {
        'width': imagedata.width,
        'height': imagedata.height
    }
    return picdata


def getansidata(filename):
    """extract SAUCE data from filename"""
    saucedata = SAUCE(filename)
    ansidata = {
        'author': saucedata.author,
        'group': saucedata.group,
        'title': saucedata.title,
        'filesize': saucedata.filesize,
        'comments': saucedata.comments,
        'width': None,
        'height': None
    }
    tinfonames = [saucedata.tinfo1_name,
                  saucedata.tinfo2_name,
                  saucedata.tinfo3_name,
                  saucedata.tinfo4_name]
    tinfo = [saucedata.tinfo1,
             saucedata.tinfo2,
             saucedata.tinfo3,
             saucedata.tinfo4]
    for i in range(0, 3):
        if tinfonames[i] == 'width':
            ansidata['width'] = tinfo[i]
        if tinfonames[i] == 'height':
            ansidata['height'] = tinfo[i]
        # print(tinfonames[i])
    return ansidata


def getamigadata(filename):
    """try to get some form of info from file (:"""
    with open(filename, encoding='latin1') as f:
        ascii = f.readlines()
    width = 0
    for i in ascii:
        if len(i) > width:
            width = len(i)
    return {'height': len(ascii),
            'width': width}


def main(args):
    """maintain a list.sahli file"""
    if args.new:
        mysahli = SF.sahlifile(None)
    else:
        mysahli = SF.sahlifile(args.filename)
    mysahli.sahli['location'] = args.directory
    files = getfilesindir(args.directory)
    filedata = mysahli.sahli['filedata']
    filedatanames = getfilenames(filedata)
    newdata = []
    for i in files:
        dirfile = '{}/{}'.format(args.directory, i)
        if i in filedatanames:
            print('found! {}'.format(i))
            # todo: _if_ I ever make this a non-preparser, then... futz with
            a = getansidata(dirfile)
            newdata.append(getdata(filedata, i))
        else:
            print('not found! {}'.format(i))
            suf = i.split('.')[-1]
            if suf in ['png', 'jpg', 'jpeg', 'gif',
                       'PNG', 'JPG', 'JPEG', 'GIF']:
                stuff = getpicdata(dirfile)
                entry = mysahli.blank_picture()
                entry['width'] = stuff['width']
                entry['height'] = stuff['height']
                entry['file'] = i
                entry['name'] = i
                newdata.append(entry)
            elif suf in ['ans', 'ANS', 'BIN', 'bin', 'XB', 'xb']:
                stuff = getansidata(dirfile)
                entry = mysahli.blank_ansi()
                entry['file'] = i
                entry['name'] = stuff['title']
                entry['author'] = '{}/{}'.format(
                    stuff['author'], stuff['group'])
                entry['text'] = stuff['comments']
                if stuff['height'] is not None:
                    entry['height'] = stuff['height']
                if stuff['width'] is not None:
                    entry['width'] = stuff['width']
                newdata.append(entry)
            elif suf in ['TXT', 'ASC', 'txt', 'asc',
                         'NFO', 'nfo', 'diz', 'DIZ']:
                stuff = getamigadata(dirfile)
                entry = mysahli.blank_amiga_ascii()
                entry['name'] = i
                # entry['title'] = i
                # entry['height'] = stuff['height']
                entry['file'] = i
                newdata.append(entry)
            else:
                print("dunno what type of file this is...")
    mysahli.sahli['filedata'] = newdata
    out = json.dumps(mysahli.sahli, sort_keys=False, indent=4)
    if args.outfile == '>stdout':
        print(out)
    else:
        with open(args.outfile, 'w') as f:
            json.dump(mysahli.sahli, f, sort_keys=False, indent=4)


if __name__ == '__main__':
    ap = argparse.ArgumentParser()
    ap.add_argument('-f', '--filename', default='list.sahli')
    ap.add_argument('-n', '--new', action='store_true')
    ap.add_argument('-o', '--outfile', type=str, default='>stdout')
    ap.add_argument('-d', '--directory', type=str, required=True,
                    help='directory where compo files are')
    main(ap.parse_args())
```
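For reference, a minimal sketch (not part of the commit) of how this script can be driven from Python instead of its command line; the directory name used here is only a placeholder:

```python
# Hypothetical usage sketch: build a fresh list.sahli for a directory of
# compo files and print the resulting JSON, mirroring `editor.py -n -d <dir>`.
from argparse import Namespace

import editor  # assumes editor.py is importable from the working directory

args = Namespace(filename='list.sahli',      # ignored when new=True
                 new=True,                   # start from a blank sahlifile
                 outfile='>stdout',          # the script's print-to-stdout sentinel
                 directory='compo_entries')  # placeholder directory
editor.main(args)
```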
**sahliEditorPython/__init__.py** (new, empty file)
**sahliEditorPython/sahlifile.py** (new file, 130 lines added)

```python
#!/usr/bin/env python
# coding:utf-8
"""
Author: Sir Garbagetruck --<truck@whatever>
Purpose: base class for Sahli file
Created: 2020/04/09
"""

import json
########################################################################


class sahlifile:
    """the Sahli file structure and classes to futz with"""

    # ----------------------------------------------------------------------
    def __init__(self, filename):
        """Constructor"""
        self.valid_filetypes = [
            "plain",
            "ansi",
            "xbin",
            "ice",
            "adf",
            "avatar",
            "bin",
            "idf",
            "pcboard",
            "tundra"
        ]
        self.valid_fonts = [
            'Propaz', 'ansifont', 'mOsOul', 'Microknight', 'p0t-nOodle'
        ]
        if filename is not None:
            with open(filename) as f:
                self.sahli = json.load(f)
        else:
            location = self.blank_location()
            slides = self.blank_slides()
            filedata = []
            self.sahli = {
                'location': location,
                'slides': slides,
                'filedata': filedata
            }

    def blank_slides(self):
        """blank slide structure"""
        slides = {
            'background': '',
            'template': '',
            'css': ''
        }
        return slides

    def blank_location(self):
        """blank location structure"""
        return ''

    def blank_picture(self):
        """Blank picture structure"""
        return {
            'file': '',
            'name': '',
            'amiga': False,
            'filetype': 'image',
            'width': '1600',
            'author': '',
            'font': 'Propaz',
            'color': [0, 0, 0, 255],
            'bg': [255, 255, 255, 255],
            'line1': '',
            'line2': '',
            'text': ''
        }
    # ----------------------------------------------------------------------

    def blank_amiga_ascii(self):
        """blank amiga ascii"""
        return {
            'file': '',
            'name': '',
            'amiga': True,
            'filetype': 'plain',
            'width': '80',
            'author': '',
            'font': 'Propaz',
            'color': [250, 250, 250, 255],
            'bg': [0, 0, 0, 255],
            'line1': '',
            'line2': '',
            'text': ''
        }
    # ----------------------------------------------------------------------

    def blank_ansi(self):
        """blank PC Ansi"""
        return {
            'file': '',
            'name': '',
            'amiga': False,
            'filetype': 'ansi',
            'width': '80',
            'author': '',
            'font': 'Propaz',
            'color': [255, 255, 255, 255],
            'bg': [0, 0, 0, 255],
            'line1': '',
            'line2': '',
            'text': ''
        }

    def blank_filedata(self):
        """Blank filedata structure"""

        filedata = {
            'file': '',
            'name': '',
            'amiga': False,
            'filetype': 'image',
            'width': '',
            'author': '',
            'font': 'Propaz',
            'color': [0, 0, 0, 255],
            'bg': [255, 255, 255, 255],
            'line1': '',
            'line2': '',
            'text': ''
        }
        return filedata
```
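As a quick illustration (not part of the commit), this is roughly how the class above is used on its own; the file name is a placeholder:

```python
# Hypothetical usage sketch: start an empty Sahli structure, add one picture
# entry, and serialize it the same way editor.py does.
import json

from sahliEditorPython import sahlifile as SF

mysahli = SF.sahlifile(None)            # None -> blank location/slides/filedata
entry = mysahli.blank_picture()
entry['file'] = 'logo.png'              # placeholder filename
entry['name'] = 'logo.png'
mysahli.sahli['filedata'].append(entry)
print(json.dumps(mysahli.sahli, sort_keys=False, indent=4))
```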
**sauce.py** (new file, 510 lines added)

```python
#! /usr/bin/env python
#
#                          _______
# ____________ _______ _\__ /_________ ___ _____
# | _ _ \ _ | ____\ _ / | |/ _ \
# | / / / / | | | /___/ _ | | / /
# |___/___/ /___/____|________|___ | |_| |___|_____/
# \__/ |___|
#
# (c) 2006-2012 Wijnand Modderman-Lenstra - https://maze.io/
#

'''
Parser for SAUCE or Standard Architecture for Universal Comment Extensions.
'''

__author__ = 'Wijnand Modderman-Lenstra <maze@pyth0n.org>'
__copyright__ = '(C) 2006-2012 Wijnand Modderman-Lenstra'
__license__ = 'LGPL'
__version__ = '1.2'
__url__ = 'https://github.com/tehmaze/sauce'

import datetime
import os
import struct
try:
    from io import StringIO
except ImportError:
    from io import StringIO


class SAUCE(object):
    '''
    Parser for SAUCE or Standard Architecture for Universal Comment Extensions,
    as defined in http://www.acid.org/info/sauce/s_spec.htm.

    :param filename: file name or file handle
    :property author: Name or 'handle' of the creator of the file
    :property datatype: Type of data
    :property date: Date the file was created
    :property filesize: Original filesize NOT including any information of
                        SAUCE
    :property group: Name of the group/company the creator is employed by
    :property title: Title of the file

    Example::

        >>> art = open('31337.ANS', 'rb')
        >>> nfo = sauce.SAUCE(art)
        >>> nfo.author
        'maze'
        ...
        >>> nfo.group
        ''
        >>> nfo.group = 'mononoke'
        >>> raw = str(nfo)

    Saving the new file::

        >>> sav = open('31337.NEW', 'wb')
        >>> nfo.write(sav)
        >>> # OR you can do:
        >>> sav = nfo.write('31337.NEW')

    '''

    # template
    # name default size type
    template = (
        ('SAUCE', 'SAUCE', 5, '5s'),
        ('SAUCEVersion', '00', 2, '2s'),
        ('Title', '\x00' * 35, 35, '35s'),
        ('Author', '\x00' * 20, 20, '20s'),
        ('Group', '\x00' * 20, 20, '20s'),
        ('Date', '\x00' * 8, 8, '8s'),
        ('FileSize', [0], 4, 'I'),
        ('DataType', [0], 1, 'B'),
        ('FileType', [0], 1, 'B'),
        ('TInfo1', [0], 2, 'H'),
        ('TInfo2', [0], 2, 'H'),
        ('TInfo3', [0], 2, 'H'),
        ('TInfo4', [0], 2, 'H'),
        ('Comments', [0], 1, 'B'),
        ('Flags', [0], 1, 'B'),
        ('Filler', ['\x00'] * 22, 22, '22c'),
    )
    templates = [t[0] for t in template]
    datatypes = ['None', 'Character', 'Graphics', 'Vector', 'Sound',
                 'BinaryText', 'XBin', 'Archive', 'Executable']
    filetypes = {
        'None': {
            'filetype': ['Undefined'],
        },
        'Character': {
            'filetype': ['ASCII', 'ANSi', 'ANSiMation', 'RIP', 'PCBoard',
                         'Avatar', 'HTML', 'Source'],
            'flags': {0: 'None', 1: 'iCE Color'},
            'tinfo': (
                ('width', 'height', None, None),
                ('width', 'height', None, None),
                ('width', 'height', None, None),
                ('width', 'height', 'colors', None),
                ('width', 'height', None, None),
                ('width', 'height', None, None),
                (None, None, None, None),
            ),
        },
        'Graphics': {
            'filetype': ['GIF', 'PCX', 'LBM/IFF', 'TGA', 'FLI', 'FLC',
                         'BMP', 'GL', 'DL', 'WPG', 'PNG', 'JPG', 'MPG',
                         'AVI'],
            'tinfo': (('width', 'height', 'bpp')) * 14,
        },
        'Vector': {
            'filetype': ['DX', 'DWG', 'WPG', '3DS'],
        },
        'Sound': {
            'filetype': ['MOD', '669', 'STM', 'S3M', 'MTM', 'FAR', 'ULT',
                         'AMF', 'DMF', 'OKT', 'ROL', 'CMF', 'MIDI', 'SADT',
                         'VOC', 'WAV', 'SMP8', 'SMP8S', 'SMP16', 'SMP16S',
                         'PATCH8', 'PATCH16', 'XM', 'HSC', 'IT'],
            'tinfo': ((None,)) * 16 + (('Sampling Rate',)) * 4,
        },
        'BinaryText': {
            'flags': {0: 'None', 1: 'iCE Color'},
        },
        'XBin': {
            'tinfo': (('width', 'height'),),
        },
        'Archive': {
            'filetype': ['ZIP', 'ARJ', 'LZH', 'ARC', 'TAR', 'ZOO', 'RAR',
                         'UC2', 'PAK', 'SQZ'],
        },
    }

    def __init__(self, filename='', data=''):
        assert (filename or data), 'Need either filename or record'

        if filename:
            # if type(filename) == file:
            #     self.filehand = filename
            # else:
            self.filehand = open(filename, 'rb')
            self._size = os.path.getsize(self.filehand.name)
        else:
            self._size = len(data)
            self.filehand = StringIO(data)

        self.record, self.data = self._read()

    def __str__(self):
        return ''.join(list(self._read_file()))

    def _read_file(self):
        # Buffered reader (generator), reads the original file without SAUCE
        # record.
        self.filehand.seek(0)
        # Check if we have SAUCE data
        if self.record:
            reads, rest = divmod(self._size - 128, 1024)
        else:
            reads, rest = divmod(self._size, 1024)
        for x in range(0, reads):
            yield self.filehand.read(1024)
        if rest:
            yield self.filehand.read(rest)

    def _read(self):
        if self._size >= 128:
            self.filehand.seek(self._size - 128)
            record = self.filehand.read(128)
            if record.startswith(b'SAUCE'):
                self.filehand.seek(0)
                return record, self.filehand.read(self._size - 128)

        self.filehand.seek(0)
        return None, self.filehand.read()

    def _gets(self, key):
        if self.record is None:
            return None

        name, default, offset, size, stype = self._template(key)
        data = self.record[offset:offset + size]
        data = struct.unpack(stype, data)
        if stype[-1] in 'cs':
            # return ''.join(data)
            return data[0].decode()
        elif stype[-1] in 'BI' and len(stype) == 1:
            return data[0]
        else:
            return data

    def _puts(self, key, data):
        name, default, offset, size, stype = self._template(key)
        #print offset, size, data, repr(struct.pack(stype, data))
        if self.record is None:
            self.record = self.sauce()
        self.record = ''.join([
            self.record[:offset],
            struct.pack(stype, data),
            self.record[offset + size:]
        ])
        return self.record

    def _template(self, key):
        index = self.templates.index(key)
        name, default, size, stype = self.template[index]
        offset = sum([self.template[x][2] for x in range(0, index)])
        return name, default, offset, size, stype

    def sauce(self):
        '''
        Get the raw SAUCE record.
        '''
        if self.record:
            return self.record
        else:
            data = 'SAUCE'
            for name, default, size, stype in self.template[1:]:
                #print stype, default
                if stype[-1] in 's':
                    data += struct.pack(stype, default)
                else:
                    data += struct.pack(stype, *default)
            return data

    def write(self, filename):
        '''
        Save the file including SAUCE data to the given file(handle).
        '''
        filename = type(filename) == file and filename or open(
            filename, 'wb')
        for part in self._read_file():
            filename.write(part)
        filename.write(self.sauce())
        return filename

    # SAUCE meta data

    def get_author(self):
        astr = self._gets('Author')
        if astr is not None:
            return astr.strip()
        else:
            return ''

    def set_author(self, author):
        self._puts('Author', author)
        return self

    def get_comments(self):
        return self._gets('Comments')

    def set_comments(self, comments):
        self._puts('Comments', comments)
        return self

    def get_datatype(self):
        return self._gets('DataType')

    def get_datatype_str(self):
        datatype = self.datatype
        if datatype is None:
            return None
        if datatype < len(self.datatypes):
            return self.datatypes[datatype]
        else:
            return None

    def set_datatype(self, datatype):
        if type(datatype) == str:
            datatype = datatype.lower().title()  # fOoBAR -> Foobar
            datatype = self.datatypes.index(datatype)
        self._puts('DataType', datatype)
        return self

    def get_date(self):
        return self._gets('Date')

    def get_date_str(self, format='%Y%m%d'):
        return datetime.datetime.strptime(self.date, format)

    def set_date(self, date=None, format='%Y%m%d'):
        if date is None:
            date = datetime.datetime.now().strftime(format)
        elif type(date) in [datetime.date, datetime.datetime]:
            date = date.strftime(format)
        elif type(date) in [int, int, float]:
            date = datetime.datetime.fromtimestamp(date).strftime(format)
        self._puts('Date', date)
        return self

    def get_filesize(self):
        return self._gets('FileSize')

    def set_filesize(self, size):
        self._puts('FileSize', size)

    def get_filler(self):
        return self._gets('Filler')

    def get_filler_str(self):
        filler = self._gets('Filler')
        if filler is None:
            return ''
        else:
            return filler.rstrip('\x00')

    def get_filetype(self):
        return self._gets('FileType')

    def get_filetype_str(self):
        datatype = self.datatype_str
        filetype = self.filetype

        if datatype is None or filetype is None:
            return None

        if datatype in self.filetypes and \
                'filetype' in self.filetypes[datatype] and \
                filetype < len(self.filetypes[datatype]['filetype']):
            return self.filetypes[datatype]['filetype'][filetype]
        else:
            return None

    def set_filetype(self, filetype):
        datatype = self.datatype_str
        if type(filetype) == str:
            filetype = filetype.lower().title()  # fOoBAR -> Foobar
            filetype = [name.lower().title()
                        for name in self.filetypes[datatype]['filetype']].index(filetype)
        self._puts('FileType', filetype)
        return self

    def get_flags(self):
        return self._gets('Flags')

    def set_flags(self, flags):
        self._puts('Flags', flags)
        return self

    def get_flags_str(self):
        datatype = self.datatype_str
        filetype = self.filetype

        if datatype is None or filetype is None:
            return None

        if datatype in self.filetypes and \
                'flags' in self.filetypes[datatype] and \
                filetype < len(self.filetypes[datatype]['filetype']):
            return self.filetypes[datatype]['filetype'][filetype]
        else:
            return None

    def get_group(self):
        gstr = self._gets('Group')
        if gstr is not None:
            return gstr.strip()
        else:
            return ''
        # return self._gets('Group').strip()

    def set_group(self, group):
        self._puts('Group', group)
        return self

    def _get_tinfo_name(self, i):
        datatype = self.datatype_str
        filetype = self.filetype

        if datatype is None or filetype is None:
            return None

        try:
            return self.filetypes[datatype]['tinfo'][filetype][i - 1]
        except (KeyError, IndexError):
            return ''

    def get_tinfo1(self):
        tinfo = self._gets('TInfo1')
        if tinfo is not None:
            return tinfo[0]
        else:
            return ''

    def get_tinfo1_name(self):
        return self._get_tinfo_name(1)

    def set_tinfo1(self, tinfo):
        self._puts('TInfo1', tinfo)
        return self

    def get_tinfo2(self):
        tinfo = self._gets('TInfo2')
        if tinfo is not None:
            return tinfo[0]
        else:
            return ''

    def get_tinfo2_name(self):
        return self._get_tinfo_name(2)

    def set_tinfo2(self, tinfo):
        self._puts('TInfo2', tinfo)
        return self

    def get_tinfo3(self):
        tinfo = self._gets('TInfo3')
        if tinfo is not None:
            return tinfo[0]
        return ''

    def get_tinfo3_name(self):
        return self._get_tinfo_name(3)

    def set_tinfo3(self, tinfo):
        self._puts('TInfo3', tinfo)
        return self

    def get_tinfo4(self):
        tinfo = self._gets('TInfo4')
        if tinfo is not None:
            return tinfo[0]
        return ''

    def get_tinfo4_name(self):
        return self._get_tinfo_name(4)

    def set_tinfo4(self, tinfo):
        self._puts('TInfo4', tinfo)
        return self

    def get_title(self):
        tstr = self._gets('Title')
        if tstr is not None:
            return tstr.strip()
        else:
            return ''
        # return self._gets('Title').strip()

    def set_title(self, title):
        self._puts('Title', title)
        return self

    def get_version(self):
        return self._gets('SAUCEVersion')

    def set_version(self, version):
        self._puts('SAUCEVersion', version)
        return self

    # properties
    author = property(get_author, set_author)
    comments = property(get_comments, set_comments)
    datatype = property(get_datatype, set_datatype)
    datatype_str = property(get_datatype_str)
    date = property(get_date, set_date)
    filesize = property(get_filesize, set_filesize)
    filetype = property(get_filetype, set_filetype)
    filetype_str = property(get_filetype_str)
    filler = property(get_filler)
    filler_str = property(get_filler_str)
    flags = property(get_flags, set_flags)
    flags_str = property(get_flags_str)
    group = property(get_group, set_group)
    tinfo1 = property(get_tinfo1, set_tinfo1)
    tinfo1_name = property(get_tinfo1_name)
    tinfo2 = property(get_tinfo2, set_tinfo2)
    tinfo2_name = property(get_tinfo2_name)
    tinfo3 = property(get_tinfo3, set_tinfo3)
    tinfo3_name = property(get_tinfo3_name)
    tinfo4 = property(get_tinfo4, set_tinfo4)
    tinfo4_name = property(get_tinfo4_name)
    title = property(get_title, set_title)
    version = property(get_version)


if __name__ == '__main__':
    import sys
    if len(sys.argv) != 2:
        print('%s <file>' % (sys.argv[0],), file=sys.stderr)
        sys.exit(1)
    else:
        test = SAUCE(sys.argv[1])

    def show(sauce):
        print('Version.:', sauce.version)
        print('Title...:', sauce.title)
        print('Author..:', sauce.author)
        print('Group...:', sauce.group)
        print('Date....:', sauce.date)
        print('FileSize:', sauce.filesize)
        print('DataType:', sauce.datatype, sauce.datatype_str)
        print('FileType:', sauce.filetype, sauce.filetype_str)
        print('TInfo1..:', sauce.tinfo1)
        print('TInfo2..:', sauce.tinfo2)
        print('TInfo3..:', sauce.tinfo3)
        print('TInfo4..:', sauce.tinfo4)
        print('Flags...:', sauce.flags, sauce.flags_str)
        print('Record..:', len(sauce.record), repr(sauce.record))
        print('Filler..:', sauce.filler_str)

    if test.record:
        show(test)
    else:
        print('No SAUCE record found')
        test = SAUCE(data=test.sauce())
        show(test)
```
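A short sketch (not part of the commit) of how editor.py consumes this parser: read a file's SAUCE record and recover width/height from the TInfo fields; the path is a placeholder:

```python
# Hypothetical usage sketch: pull title/author and the width/height TInfo
# values out of a SAUCE record, much like editor.py's getansidata() does.
from sauce import SAUCE

nfo = SAUCE('artwork.ans')              # placeholder path
if nfo.record:
    print(nfo.title, nfo.author, nfo.group)
    names = [nfo.tinfo1_name, nfo.tinfo2_name, nfo.tinfo3_name, nfo.tinfo4_name]
    values = [nfo.tinfo1, nfo.tinfo2, nfo.tinfo3, nfo.tinfo4]
    info = dict(zip(names, values))
    print(info.get('width'), info.get('height'))
else:
    print('No SAUCE record found')
```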
**setup.py** (new file, 15 lines added)

```python
#!/usr/bin/env python
# coding:utf-8
"""
Author: Sir Garbagetruck --<truck@whatever>
Purpose: setup script for Sahli editor tools
Created: 2020/04/09
"""

from setuptools import setup, find_packages

setup(
    name="SahliEditor",
    version="0.1",
    packages=find_packages()
)
```