author     Renard  2020-03-29 18:43:36 -0300
committer  Renard  2020-03-29 18:43:36 -0300
commit     56c690b9efdb009ab44f3112b6c301d7d393f07e (patch)
tree       b2a28666888df9b60b46b6d1c59dd3818437b405 /cgi
parent     775ef3e6291c5ad6bff68a12f6ca81c8663da3dc (diff)
download   weabot-56c690b9efdb009ab44f3112b6c301d7d393f07e.tar.gz
           weabot-56c690b9efdb009ab44f3112b6c301d7d393f07e.tar.xz
           weabot-56c690b9efdb009ab44f3112b6c301d7d393f07e.zip
Format Python code with pep8
Diffstat (limited to 'cgi')
-rw-r--r--  cgi/BeautifulSoup.py    480
-rw-r--r--  cgi/anarkia.py          854
-rw-r--r--  cgi/api.py              822
-rw-r--r--  cgi/database.py         102
-rw-r--r--  cgi/fcgi.py             137
-rw-r--r--  cgi/formatting.py       784
-rw-r--r--  cgi/framework.py        815
-rw-r--r--  cgi/geoip.py              5
-rw-r--r--  cgi/img.py              792
-rw-r--r--  cgi/manage.py          4002
-rw-r--r--  cgi/markdown.py         337
-rw-r--r--  cgi/oekaki.py           401
-rw-r--r--  cgi/template.py         210
-rw-r--r--  cgi/tenjin.py           839
-rwxr-xr-x  cgi/weabot.py          2014
15 files changed, 6700 insertions(+), 5894 deletions(-)
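
The changes below are mechanical PEP 8 cleanup: spaces around operators and after commas, "# " comment prefixes, two blank lines between top-level definitions, and wrapped long lines, with no behavioral changes intended. A minimal sketch of how such a pass over cgi/ could be reproduced with autopep8 — an assumption, since the commit message only says "pep8" and does not name the tool:

    import glob
    import autopep8  # assumed tool; any PEP 8 autoformatter would do

    # Rewrite every Python source under cgi/ in place, using only the
    # default whitespace/layout fixes (no aggressive transformations),
    # which matches the purely cosmetic nature of this diff.
    for path in glob.glob('cgi/*.py'):
        with open(path) as f:
            original = f.read()
        fixed = autopep8.fix_code(original)
        if fixed != original:
            with open(path, 'w') as f:
                f.write(fixed)
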
diff --git a/cgi/BeautifulSoup.py b/cgi/BeautifulSoup.py
index 7278215..3e97785 100644
--- a/cgi/BeautifulSoup.py
+++ b/cgi/BeautifulSoup.py
@@ -90,26 +90,28 @@ import types
import re
import sgmllib
try:
- from htmlentitydefs import name2codepoint
+ from htmlentitydefs import name2codepoint
except ImportError:
- name2codepoint = {}
+ name2codepoint = {}
try:
set
except NameError:
from sets import Set as set
-#These hacks make Beautiful Soup able to parse XML with namespaces
+# These hacks make Beautiful Soup able to parse XML with namespaces
sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*')
markupbase._declname_match = re.compile(r'[a-zA-Z][-_.:a-zA-Z0-9]*\s*').match
DEFAULT_OUTPUT_ENCODING = "utf-8"
+
def _match_css_class(str):
"""Build a RE to match the given CSS class."""
return re.compile(r"(^|.*\s)%s($|\s)" % str)
# First, the classes that represent markup elements.
+
class PageElement(object):
"""Contains the navigational information for some part of the page
(either a tag or a piece of text)"""
@@ -117,15 +119,15 @@ class PageElement(object):
def _invert(h):
"Cheap function to invert a hash."
i = {}
- for k,v in h.items():
+ for k, v in h.items():
i[v] = k
return i
- XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'",
- "quot" : '"',
- "amp" : "&",
- "lt" : "<",
- "gt" : ">" }
+ XML_ENTITIES_TO_SPECIAL_CHARS = {"apos": "'",
+ "quot": '"',
+ "amp": "&",
+ "lt": "<",
+ "gt": ">"}
XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS)
@@ -145,7 +147,7 @@ class PageElement(object):
oldParent = self.parent
myIndex = self.parent.index(self)
if hasattr(replaceWith, "parent")\
- and replaceWith.parent is self.parent:
+ and replaceWith.parent is self.parent:
# We're replacing this element with one of its siblings.
index = replaceWith.parent.index(replaceWith)
if index and index < myIndex:
@@ -173,9 +175,9 @@ class PageElement(object):
except ValueError:
pass
- #Find the two elements that would be next to each other if
- #this element (and any children) hadn't been parsed. Connect
- #the two.
+ # Find the two elements that would be next to each other if
+ # this element (and any children) hadn't been parsed. Connect
+ # the two.
lastChild = self._lastRecursiveChild()
nextElement = lastChild.next
@@ -203,10 +205,10 @@ class PageElement(object):
def insert(self, position, newChild):
if isinstance(newChild, basestring) \
- and not isinstance(newChild, NavigableString):
+ and not isinstance(newChild, NavigableString):
newChild = NavigableString(newChild)
- position = min(position, len(self.contents))
+ position = min(position, len(self.contents))
if hasattr(newChild, 'parent') and newChild.parent is not None:
# We're 'inserting' an element that's already one
# of this object's children.
@@ -243,7 +245,7 @@ class PageElement(object):
while not parentsNextSibling:
parentsNextSibling = parent.nextSibling
parent = parent.parent
- if not parent: # This is the last element in the document.
+ if not parent: # This is the last element in the document.
break
if parentsNextSibling:
newChildsLastElement.next = parentsNextSibling
@@ -288,7 +290,7 @@ class PageElement(object):
criteria and appear after this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.nextSiblingGenerator, **kwargs)
- fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x
+ fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x
def findPrevious(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
@@ -300,8 +302,8 @@ class PageElement(object):
"""Returns all items that match the given criteria and appear
before this Tag in the document."""
return self._findAll(name, attrs, text, limit, self.previousGenerator,
- **kwargs)
- fetchPrevious = findAllPrevious # Compatibility with pre-3.x
+ **kwargs)
+ fetchPrevious = findAllPrevious # Compatibility with pre-3.x
def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
@@ -315,7 +317,7 @@ class PageElement(object):
criteria and appear before this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.previousSiblingGenerator, **kwargs)
- fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x
+ fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x
def findParent(self, name=None, attrs={}, **kwargs):
"""Returns the closest parent of this Tag that matches the given
@@ -334,9 +336,9 @@ class PageElement(object):
return self._findAll(name, attrs, None, limit, self.parentGenerator,
**kwargs)
- fetchParents = findParents # Compatibility with pre-3.x
+ fetchParents = findParents # Compatibility with pre-3.x
- #These methods do the real heavy lifting.
+ # These methods do the real heavy lifting.
def _findOne(self, method, name, attrs, text, **kwargs):
r = None
@@ -381,8 +383,8 @@ class PageElement(object):
break
return results
- #These Generators can be used to navigate starting from both
- #NavigableStrings and Tags.
+ # These Generators can be used to navigate starting from both
+ # NavigableStrings and Tags.
def nextGenerator(self):
i = self
while i is not None:
@@ -431,7 +433,7 @@ class PageElement(object):
s = unicode(s)
else:
if encoding:
- s = self.toEncoding(str(s), encoding)
+ s = self.toEncoding(str(s), encoding)
else:
s = unicode(s)
return s
@@ -483,11 +485,13 @@ class NavigableString(unicode, PageElement):
else:
return data
+
class CData(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
return "<![CDATA[%s]]>" % NavigableString.__str__(self, encoding)
+
class ProcessingInstruction(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
output = self
@@ -495,14 +499,17 @@ class ProcessingInstruction(NavigableString):
output = self.substituteEncoding(output, encoding)
return "<?%s?>" % self.toEncoding(output, encoding)
+
class Comment(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
return "<!--%s-->" % NavigableString.__str__(self, encoding)
+
class Declaration(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
return "<!%s>" % NavigableString.__str__(self, encoding)
+
class Tag(PageElement):
"""Represents a found HTML tag with its attributes and contents."""
@@ -555,15 +562,15 @@ class Tag(PageElement):
self.escapeUnrecognizedEntities = parser.escapeUnrecognizedEntities
# Convert any HTML, XML, or numeric entities in the attribute values.
- convert = lambda(k, val): (k,
- re.sub("&(#\d+|#x[0-9a-fA-F]+|\w+);",
- self._convertEntities,
- val))
+ def convert((k, val)): return (k,
+ re.sub("&(#\d+|#x[0-9a-fA-F]+|\w+);",
+ self._convertEntities,
+ val))
self.attrs = map(convert, self.attrs)
def getString(self):
if (len(self.contents) == 1
- and isinstance(self.contents[0], NavigableString)):
+ and isinstance(self.contents[0], NavigableString)):
return self.contents[0]
def setString(self, string):
@@ -646,8 +653,8 @@ class Tag(PageElement):
for item in self.attrs:
if item[0] == key:
self.attrs.remove(item)
- #We don't break because bad HTML can define the same
- #attribute multiple times.
+ # We don't break because bad HTML can define the same
+ # attribute multiple times.
self._getAttrMap()
if self.attrMap.has_key(key):
del self.attrMap[key]
@@ -659,7 +666,7 @@ class Tag(PageElement):
return apply(self.findAll, args, kwargs)
def __getattr__(self, tag):
- #print "Getattr %s.%s" % (self.__class__, tag)
+ # print "Getattr %s.%s" % (self.__class__, tag)
if len(tag) > 3 and tag.rfind('Tag') == len(tag)-3:
return self.find(tag[:-3])
elif tag.find('__') != 0:
@@ -738,7 +745,8 @@ class Tag(PageElement):
# value might also contain angle brackets, or
# ampersands that aren't part of entities. We need
# to escape those to XML entities too.
- val = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, val)
+ val = self.BARE_AMPERSAND_OR_BRACKET.sub(
+ self._sub_entity, val)
attrs.append(fmt % (self.toEncoding(key, encoding),
self.toEncoding(val, encoding)))
@@ -802,7 +810,7 @@ class Tag(PageElement):
prettyPrint=False, indentLevel=0):
"""Renders the contents of this tag as a string in the given
encoding. If encoding is None, returns a Unicode string.."""
- s=[]
+ s = []
for c in self:
text = None
if isinstance(c, NavigableString):
@@ -819,7 +827,7 @@ class Tag(PageElement):
s.append("\n")
return ''.join(s)
- #Soup methods
+ # Soup methods
def find(self, name=None, attrs={}, recursive=True, text=None,
**kwargs):
@@ -859,7 +867,7 @@ class Tag(PageElement):
def firstText(self, text=None, recursive=True):
return self.find(text=text, recursive=recursive)
- #Private methods
+ # Private methods
def _getAttrMap(self):
"""Initializes a map representation of this tag's attributes,
@@ -870,7 +878,7 @@ class Tag(PageElement):
self.attrMap[key] = value
return self.attrMap
- #Generator methods
+ # Generator methods
def childGenerator(self):
# Just use the iterator from the contents
return iter(self.contents)
@@ -917,12 +925,12 @@ class SoupStrainer:
markup = markupName
markupAttrs = markup
callFunctionWithTagData = callable(self.name) \
- and not isinstance(markupName, Tag)
+ and not isinstance(markupName, Tag)
if (not self.name) \
- or callFunctionWithTagData \
- or (markup and self._matches(markup, self.name)) \
- or (not markup and self._matches(markupName, self.name)):
+ or callFunctionWithTagData \
+ or (markup and self._matches(markup, self.name)) \
+ or (not markup and self._matches(markupName, self.name)):
if callFunctionWithTagData:
match = self.name(markupName, markupAttrs)
else:
@@ -930,11 +938,11 @@ class SoupStrainer:
markupAttrMap = None
for attr, matchAgainst in self.attrs.items():
if not markupAttrMap:
- if hasattr(markupAttrs, 'get'):
+ if hasattr(markupAttrs, 'get'):
markupAttrMap = markupAttrs
- else:
+ else:
markupAttrMap = {}
- for k,v in markupAttrs:
+ for k, v in markupAttrs:
markupAttrMap[k] = v
attrValue = markupAttrMap.get(attr)
if not self._matches(attrValue, matchAgainst):
@@ -948,7 +956,7 @@ class SoupStrainer:
return found
def search(self, markup):
- #print 'looking for %s in %s' % (self, markup)
+ # print 'looking for %s in %s' % (self, markup)
found = None
# If given a list of items, scan it for a text element that
# matches.
@@ -956,7 +964,7 @@ class SoupStrainer:
and not isinstance(markup, Tag):
for element in markup:
if isinstance(element, NavigableString) \
- and self.search(element):
+ and self.search(element):
found = element
break
# If it's a Tag, make sure its name or attributes match.
@@ -966,33 +974,33 @@ class SoupStrainer:
found = self.searchTag(markup)
# If it's text, make sure the text matches.
elif isinstance(markup, NavigableString) or \
- isinstance(markup, basestring):
+ isinstance(markup, basestring):
if self._matches(markup, self.text):
found = markup
else:
raise Exception, "I don't know how to match against a %s" \
- % markup.__class__
+ % markup.__class__
return found
def _matches(self, markup, matchAgainst):
- #print "Matching %s against %s" % (markup, matchAgainst)
+ # print "Matching %s against %s" % (markup, matchAgainst)
result = False
if matchAgainst is True:
result = markup is not None
elif callable(matchAgainst):
result = matchAgainst(markup)
else:
- #Custom match methods take the tag as an argument, but all
- #other ways of matching match the tag name as a string.
+ # Custom match methods take the tag as an argument, but all
+ # other ways of matching match the tag name as a string.
if isinstance(markup, Tag):
markup = markup.name
if markup and not isinstance(markup, basestring):
markup = unicode(markup)
- #Now we know that chunk is either a string, or None.
+ # Now we know that chunk is either a string, or None.
if hasattr(matchAgainst, 'match'):
# It's a regexp object.
result = markup and matchAgainst.search(markup)
- elif hasattr(matchAgainst, '__iter__'): # list-like
+ elif hasattr(matchAgainst, '__iter__'): # list-like
result = markup in matchAgainst
elif hasattr(matchAgainst, 'items'):
result = markup.has_key(matchAgainst)
@@ -1006,15 +1014,18 @@ class SoupStrainer:
result = matchAgainst == markup
return result
+
class ResultSet(list):
"""A ResultSet is just a list that keeps track of the SoupStrainer
that created it."""
+
def __init__(self, source):
list.__init__([])
self.source = source
# Now, some helper functions.
+
def buildTagMap(default, *args):
"""Turns a list of maps, lists, or scalars into a single map.
Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and
@@ -1022,20 +1033,21 @@ def buildTagMap(default, *args):
built = {}
for portion in args:
if hasattr(portion, 'items'):
- #It's a map. Merge it.
- for k,v in portion.items():
+ # It's a map. Merge it.
+ for k, v in portion.items():
built[k] = v
- elif hasattr(portion, '__iter__'): # is a list
- #It's a list. Map each item to the default.
+ elif hasattr(portion, '__iter__'): # is a list
+ # It's a list. Map each item to the default.
for k in portion:
built[k] = default
else:
- #It's a scalar. Map it to the default.
+ # It's a scalar. Map it to the default.
built[portion] = default
return built
# Now, the parser classes.
+
class BeautifulStoneSoup(Tag, SGMLParser):
"""This class contains the basic parser and search code. It defines
@@ -1078,7 +1090,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
# can be replaced with a single space. A text node that contains
# fancy Unicode spaces (usually non-breaking) should be left
# alone.
- STRIP_ASCII_SPACES = { 9: None, 10: None, 12: None, 13: None, 32: None, }
+ STRIP_ASCII_SPACES = {9: None, 10: None, 12: None, 13: None, 32: None, }
def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None,
markupMassage=True, smartQuotesTo=XML_ENTITIES,
@@ -1155,7 +1167,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
n = int(name)
except ValueError:
return
- if not 0 <= n <= 127 : # ASCII ends at 127, not 255
+ if not 0 <= n <= 127: # ASCII ends at 127, not 255
return
return self.convert_codepoint(n)
@@ -1166,9 +1178,8 @@ class BeautifulStoneSoup(Tag, SGMLParser):
if not hasattr(self, 'originalEncoding'):
self.originalEncoding = None
else:
- dammit = UnicodeDammit\
- (markup, [self.fromEncoding, inDocumentEncoding],
- smartQuotesTo=self.smartQuotesTo, isHTML=isHTML)
+ dammit = UnicodeDammit(markup, [self.fromEncoding, inDocumentEncoding],
+ smartQuotesTo=self.smartQuotesTo, isHTML=isHTML)
markup = dammit.unicode
self.originalEncoding = dammit.originalEncoding
self.declaredHTMLEncoding = dammit.declaredHTMLEncoding
@@ -1195,10 +1206,10 @@ class BeautifulStoneSoup(Tag, SGMLParser):
def __getattr__(self, methodName):
"""This method routes method call requests to either the SGMLParser
superclass or the Tag superclass, depending on the method name."""
- #print "__getattr__ called on %s.%s" % (self.__class__, methodName)
+ # print "__getattr__ called on %s.%s" % (self.__class__, methodName)
if methodName.startswith('start_') or methodName.startswith('end_') \
- or methodName.startswith('do_'):
+ or methodName.startswith('do_'):
return SGMLParser.__getattr__(self, methodName)
elif not methodName.startswith('__'):
return Tag.__getattr__(self, methodName)
@@ -1209,7 +1220,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
"""Returns true iff the given string is the name of a
self-closing tag according to this parser."""
return self.SELF_CLOSING_TAGS.has_key(name) \
- or self.instanceSelfClosingTags.has_key(name)
+ or self.instanceSelfClosingTags.has_key(name)
def reset(self):
Tag.__init__(self, self, self.ROOT_TAG_NAME)
@@ -1224,13 +1235,13 @@ class BeautifulStoneSoup(Tag, SGMLParser):
def popTag(self):
tag = self.tagStack.pop()
- #print "Pop", tag.name
+ # print "Pop", tag.name
if self.tagStack:
self.currentTag = self.tagStack[-1]
return self.currentTag
def pushTag(self, tag):
- #print "Push", tag.name
+ # print "Push", tag.name
if self.currentTag:
self.currentTag.contents.append(tag)
self.tagStack.append(tag)
@@ -1248,7 +1259,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
currentData = ' '
self.currentData = []
if self.parseOnlyThese and len(self.tagStack) <= 1 and \
- (not self.parseOnlyThese.text or \
+ (not self.parseOnlyThese.text or
not self.parseOnlyThese.search(currentData)):
return
o = containerClass(currentData)
@@ -1258,13 +1269,12 @@ class BeautifulStoneSoup(Tag, SGMLParser):
self.previous = o
self.currentTag.contents.append(o)
-
def _popToTag(self, name, inclusivePop=True):
"""Pops the tag stack up to and including the most recent
instance of the given tag. If inclusivePop is false, pops the tag
stack up to but *not* including the most recent instqance of
the given tag."""
- #print "Popping to %s" % name
+ # print "Popping to %s" % name
if name == self.ROOT_TAG_NAME:
return
@@ -1282,7 +1292,6 @@ class BeautifulStoneSoup(Tag, SGMLParser):
return mostRecentTag
def _smartPop(self, name):
-
"""We need to pop up to the previous tag of this type, unless
one of this tag's nesting reset triggers comes between this
tag and the previous tag of this type, OR unless this tag is a
@@ -1307,8 +1316,8 @@ class BeautifulStoneSoup(Tag, SGMLParser):
for i in range(len(self.tagStack)-1, 0, -1):
p = self.tagStack[i]
if (not p or p.name == name) and not isNestable:
- #Non-nestable tags get popped to the top or to their
- #last occurance.
+ # Non-nestable tags get popped to the top or to their
+ # last occurance.
popTo = name
break
if (nestingResetTriggers is not None
@@ -1316,10 +1325,10 @@ class BeautifulStoneSoup(Tag, SGMLParser):
or (nestingResetTriggers is None and isResetNesting
and self.RESET_NESTING_TAGS.has_key(p.name)):
- #If we encounter one of the nesting reset triggers
- #peculiar to this tag, or we encounter another tag
- #that causes nesting to reset, pop up to but not
- #including that tag.
+ # If we encounter one of the nesting reset triggers
+ # peculiar to this tag, or we encounter another tag
+ # that causes nesting to reset, pop up to but not
+ # including that tag.
popTo = p.name
inclusive = False
break
@@ -1328,10 +1337,10 @@ class BeautifulStoneSoup(Tag, SGMLParser):
self._popToTag(popTo, inclusive)
def unknown_starttag(self, name, attrs, selfClosing=0):
- #print "Start tag %s: %s" % (name, attrs)
+ # print "Start tag %s: %s" % (name, attrs)
if self.quoteStack:
- #This is not a real tag.
- #print "<%s> is not real!" % name
+ # This is not a real tag.
+ # print "<%s> is not real!" % name
attrs = ''.join([' %s="%s"' % (x, y) for x, y in attrs])
self.handle_data('<%s%s>' % (name, attrs))
return
@@ -1341,7 +1350,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
self._smartPop(name)
if self.parseOnlyThese and len(self.tagStack) <= 1 \
- and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)):
+ and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)):
return
tag = Tag(self, name, attrs, self.currentTag, self.previous)
@@ -1352,16 +1361,16 @@ class BeautifulStoneSoup(Tag, SGMLParser):
if selfClosing or self.isSelfClosingTag(name):
self.popTag()
if name in self.QUOTE_TAGS:
- #print "Beginning quote (%s)" % name
+ # print "Beginning quote (%s)" % name
self.quoteStack.append(name)
self.literal = 1
return tag
def unknown_endtag(self, name):
- #print "End tag %s" % name
+ # print "End tag %s" % name
if self.quoteStack and self.quoteStack[-1] != name:
- #This is not a real end tag.
- #print "</%s> is not real!" % name
+ # This is not a real end tag.
+ # print "</%s> is not real!" % name
self.handle_data('</%s>' % name)
return
self.endData()
@@ -1412,27 +1421,27 @@ class BeautifulStoneSoup(Tag, SGMLParser):
pass
if not data and self.convertXMLEntities:
- data = self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref)
+ data = self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref)
if not data and self.convertHTMLEntities and \
- not self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref):
- # TODO: We've got a problem here. We're told this is
- # an entity reference, but it's not an XML entity
- # reference or an HTML entity reference. Nonetheless,
- # the logical thing to do is to pass it through as an
- # unrecognized entity reference.
- #
- # Except: when the input is "&carol;" this function
- # will be called with input "carol". When the input is
- # "AT&T", this function will be called with input
- # "T". We have no way of knowing whether a semicolon
- # was present originally, so we don't know whether
- # this is an unknown entity or just a misplaced
- # ampersand.
- #
- # The more common case is a misplaced ampersand, so I
- # escape the ampersand and omit the trailing semicolon.
- data = "&amp;%s" % ref
+ not self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref):
+ # TODO: We've got a problem here. We're told this is
+ # an entity reference, but it's not an XML entity
+ # reference or an HTML entity reference. Nonetheless,
+ # the logical thing to do is to pass it through as an
+ # unrecognized entity reference.
+ #
+ # Except: when the input is "&carol;" this function
+ # will be called with input "carol". When the input is
+ # "AT&T", this function will be called with input
+ # "T". We have no way of knowing whether a semicolon
+ # was present originally, so we don't know whether
+ # this is an unknown entity or just a misplaced
+ # ampersand.
+ #
+ # The more common case is a misplaced ampersand, so I
+ # escape the ampersand and omit the trailing semicolon.
+ data = "&amp;%s" % ref
if not data:
# This case is different from the one above, because we
# haven't already gone through a supposedly comprehensive
@@ -1452,12 +1461,12 @@ class BeautifulStoneSoup(Tag, SGMLParser):
declaration as a CData object."""
j = None
if self.rawdata[i:i+9] == '<![CDATA[':
- k = self.rawdata.find(']]>', i)
- if k == -1:
- k = len(self.rawdata)
- data = self.rawdata[i+9:k]
- j = k+3
- self._toStringSubclass(data, CData)
+ k = self.rawdata.find(']]>', i)
+ if k == -1:
+ k = len(self.rawdata)
+ data = self.rawdata[i+9:k]
+ j = k+3
+ self._toStringSubclass(data, CData)
else:
try:
j = SGMLParser.parse_declaration(self, i)
@@ -1467,6 +1476,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
j = i + len(toHandle)
return j
+
class BeautifulSoup(BeautifulStoneSoup):
"""This parser knows the following facts about HTML:
@@ -1522,46 +1532,46 @@ class BeautifulSoup(BeautifulStoneSoup):
BeautifulStoneSoup.__init__(self, *args, **kwargs)
SELF_CLOSING_TAGS = buildTagMap(None,
- ('br' , 'hr', 'input', 'img', 'meta',
- 'spacer', 'link', 'frame', 'base', 'col'))
+ ('br', 'hr', 'input', 'img', 'meta',
+ 'spacer', 'link', 'frame', 'base', 'col'))
PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea'])
- QUOTE_TAGS = {'script' : None, 'textarea' : None}
+ QUOTE_TAGS = {'script': None, 'textarea': None}
- #According to the HTML standard, each of these inline tags can
- #contain another tag of the same type. Furthermore, it's common
- #to actually use these tags this way.
+ # According to the HTML standard, each of these inline tags can
+ # contain another tag of the same type. Furthermore, it's common
+ # to actually use these tags this way.
NESTABLE_INLINE_TAGS = ('span', 'font', 'q', 'object', 'bdo', 'sub', 'sup',
'center')
- #According to the HTML standard, these block tags can contain
- #another tag of the same type. Furthermore, it's common
- #to actually use these tags this way.
+ # According to the HTML standard, these block tags can contain
+ # another tag of the same type. Furthermore, it's common
+ # to actually use these tags this way.
NESTABLE_BLOCK_TAGS = ('blockquote', 'div', 'fieldset', 'ins', 'del')
- #Lists can contain other lists, but there are restrictions.
- NESTABLE_LIST_TAGS = { 'ol' : [],
- 'ul' : [],
- 'li' : ['ul', 'ol'],
- 'dl' : [],
- 'dd' : ['dl'],
- 'dt' : ['dl'] }
-
- #Tables can contain other tables, but there are restrictions.
- NESTABLE_TABLE_TAGS = {'table' : [],
- 'tr' : ['table', 'tbody', 'tfoot', 'thead'],
- 'td' : ['tr'],
- 'th' : ['tr'],
- 'thead' : ['table'],
- 'tbody' : ['table'],
- 'tfoot' : ['table'],
+ # Lists can contain other lists, but there are restrictions.
+ NESTABLE_LIST_TAGS = {'ol': [],
+ 'ul': [],
+ 'li': ['ul', 'ol'],
+ 'dl': [],
+ 'dd': ['dl'],
+ 'dt': ['dl']}
+
+ # Tables can contain other tables, but there are restrictions.
+ NESTABLE_TABLE_TAGS = {'table': [],
+ 'tr': ['table', 'tbody', 'tfoot', 'thead'],
+ 'td': ['tr'],
+ 'th': ['tr'],
+ 'thead': ['table'],
+ 'tbody': ['table'],
+ 'tfoot': ['table'],
}
NON_NESTABLE_BLOCK_TAGS = ('address', 'form', 'p', 'pre')
- #If one of these tags is encountered, all tags up to the next tag of
- #this type are popped.
+ # If one of these tags is encountered, all tags up to the next tag of
+ # this type are popped.
RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript',
NON_NESTABLE_BLOCK_TAGS,
NESTABLE_LIST_TAGS,
@@ -1591,11 +1601,11 @@ class BeautifulSoup(BeautifulStoneSoup):
contentType = value
contentTypeIndex = i
- if httpEquiv and contentType: # It's an interesting meta tag.
+ if httpEquiv and contentType: # It's an interesting meta tag.
match = self.CHARSET_RE.search(contentType)
if match:
if (self.declaredHTMLEncoding is not None or
- self.originalEncoding == self.fromEncoding):
+ self.originalEncoding == self.fromEncoding):
# An HTML encoding was sniffed while converting
# the document to Unicode, or an HTML encoding was
# sniffed during a previous pass through the
@@ -1620,9 +1630,11 @@ class BeautifulSoup(BeautifulStoneSoup):
if tag and tagNeedsEncodingSubstitution:
tag.containsSubstitutions = True
+
class StopParsing(Exception):
pass
+
class ICantBelieveItsBeautifulSoup(BeautifulSoup):
"""The BeautifulSoup class is oriented towards skipping over
@@ -1649,9 +1661,9 @@ class ICantBelieveItsBeautifulSoup(BeautifulSoup):
wouldn't be."""
I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = \
- ('em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong',
- 'cite', 'code', 'dfn', 'kbd', 'samp', 'strong', 'var', 'b',
- 'big')
+ ('em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong',
+ 'cite', 'code', 'dfn', 'kbd', 'samp', 'strong', 'var', 'b',
+ 'big')
I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ('noscript',)
@@ -1659,6 +1671,7 @@ class ICantBelieveItsBeautifulSoup(BeautifulSoup):
I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS,
I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS)
+
class MinimalSoup(BeautifulSoup):
"""The MinimalSoup class is for parsing HTML that contains
pathologically bad markup. It makes no assumptions about tag
@@ -1672,6 +1685,7 @@ class MinimalSoup(BeautifulSoup):
RESET_NESTING_TAGS = buildTagMap('noscript')
NESTABLE_TAGS = {}
+
class BeautifulSOAP(BeautifulStoneSoup):
"""This class will push a tag with only a single string child into
the tag's parent as an attribute. The attribute's name is the tag
@@ -1699,26 +1713,36 @@ class BeautifulSOAP(BeautifulStoneSoup):
parent._getAttrMap()
if (isinstance(tag, Tag) and len(tag.contents) == 1 and
isinstance(tag.contents[0], NavigableString) and
- not parent.attrMap.has_key(tag.name)):
+ not parent.attrMap.has_key(tag.name)):
parent[tag.name] = tag.contents[0]
BeautifulStoneSoup.popTag(self)
-#Enterprise class names! It has come to our attention that some people
-#think the names of the Beautiful Soup parser classes are too silly
-#and "unprofessional" for use in enterprise screen-scraping. We feel
-#your pain! For such-minded folk, the Beautiful Soup Consortium And
-#All-Night Kosher Bakery recommends renaming this file to
-#"RobustParser.py" (or, in cases of extreme enterprisiness,
-#"RobustParserBeanInterface.class") and using the following
-#enterprise-friendly class aliases:
+# Enterprise class names! It has come to our attention that some people
+# think the names of the Beautiful Soup parser classes are too silly
+# and "unprofessional" for use in enterprise screen-scraping. We feel
+# your pain! For such-minded folk, the Beautiful Soup Consortium And
+# All-Night Kosher Bakery recommends renaming this file to
+# "RobustParser.py" (or, in cases of extreme enterprisiness,
+# "RobustParserBeanInterface.class") and using the following
+# enterprise-friendly class aliases:
+
+
class RobustXMLParser(BeautifulStoneSoup):
pass
+
+
class RobustHTMLParser(BeautifulSoup):
pass
+
+
class RobustWackAssHTMLParser(ICantBelieveItsBeautifulSoup):
pass
+
+
class RobustInsanelyWackAssHTMLParser(MinimalSoup):
pass
+
+
class SimplifyingSOAPParser(BeautifulSOAP):
pass
@@ -1732,6 +1756,7 @@ class SimplifyingSOAPParser(BeautifulSOAP):
# reflect a new encoding: that happens in BeautifulStoneSoup.handle_pi
# (XML) and BeautifulSoup.start_meta (HTML).
+
# Autodetects character encodings.
# Download from http://chardet.feedparser.org/
try:
@@ -1753,6 +1778,7 @@ try:
except ImportError:
pass
+
class UnicodeDammit:
"""A class for detecting the encoding of a *ML document and
converting it to a Unicode string. If the source encoding is
@@ -1763,14 +1789,14 @@ class UnicodeDammit:
# meta tags to the corresponding Python codec names. It only covers
# values that aren't in Python's aliases and can't be determined
# by the heuristics in find_codec.
- CHARSET_ALIASES = { "macintosh" : "mac-roman",
- "x-sjis" : "shift-jis" }
+ CHARSET_ALIASES = {"macintosh": "mac-roman",
+ "x-sjis": "shift-jis"}
def __init__(self, markup, overrideEncodings=[],
smartQuotesTo='xml', isHTML=False):
self.declaredHTMLEncoding = None
self.markup, documentEncoding, sniffedEncoding = \
- self._detectEncoding(markup, isHTML)
+ self._detectEncoding(markup, isHTML)
self.smartQuotesTo = smartQuotesTo
self.triedEncodings = []
if markup == '' or isinstance(markup, unicode):
@@ -1781,11 +1807,13 @@ class UnicodeDammit:
u = None
for proposedEncoding in overrideEncodings:
u = self._convertFrom(proposedEncoding)
- if u: break
+ if u:
+ break
if not u:
for proposedEncoding in (documentEncoding, sniffedEncoding):
u = self._convertFrom(proposedEncoding)
- if u: break
+ if u:
+ break
# If no luck and we have auto-detection library, try that:
if not u and chardet and not isinstance(self.markup, unicode):
@@ -1795,10 +1823,12 @@ class UnicodeDammit:
if not u:
for proposed_encoding in ("utf-8", "windows-1252"):
u = self._convertFrom(proposed_encoding)
- if u: break
+ if u:
+ break
self.unicode = u
- if not u: self.originalEncoding = None
+ if not u:
+ self.originalEncoding = None
def _subMSChar(self, orig):
"""Changes a MS smart quote character to an XML or HTML
@@ -1823,9 +1853,8 @@ class UnicodeDammit:
if self.smartQuotesTo and proposed.lower() in("windows-1252",
"iso-8859-1",
"iso-8859-2"):
- markup = re.compile("([\x80-\x9f])").sub \
- (lambda(x): self._subMSChar(x.group(1)),
- markup)
+ markup = re.compile("([\x80-\x9f])").sub(lambda(x): self._subMSChar(x.group(1)),
+ markup)
try:
# print "Trying to convert document to %s" % proposed
@@ -1836,7 +1865,7 @@ class UnicodeDammit:
# print "That didn't work!"
# print e
return None
- #print "Correct encoding: %s" % proposed
+ # print "Correct encoding: %s" % proposed
return self.markup
def _toUnicode(self, data, encoding):
@@ -1845,11 +1874,11 @@ class UnicodeDammit:
# strip Byte Order Mark (if present)
if (len(data) >= 4) and (data[:2] == '\xfe\xff') \
- and (data[2:4] != '\x00\x00'):
+ and (data[2:4] != '\x00\x00'):
encoding = 'utf-16be'
data = data[2:]
elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \
- and (data[2:4] != '\x00\x00'):
+ and (data[2:4] != '\x00\x00'):
encoding = 'utf-16le'
data = data[2:]
elif data[:3] == '\xef\xbb\xbf':
@@ -1876,7 +1905,7 @@ class UnicodeDammit:
sniffed_xml_encoding = 'utf-16be'
xml_data = unicode(xml_data, 'utf-16be').encode('utf-8')
elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \
- and (xml_data[2:4] != '\x00\x00'):
+ and (xml_data[2:4] != '\x00\x00'):
# UTF-16BE with BOM
sniffed_xml_encoding = 'utf-16be'
xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8')
@@ -1885,7 +1914,7 @@ class UnicodeDammit:
sniffed_xml_encoding = 'utf-16le'
xml_data = unicode(xml_data, 'utf-16le').encode('utf-8')
elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \
- (xml_data[2:4] != '\x00\x00'):
+ (xml_data[2:4] != '\x00\x00'):
# UTF-16LE with BOM
sniffed_xml_encoding = 'utf-16le'
xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8')
@@ -1931,15 +1960,15 @@ class UnicodeDammit:
xml_encoding = sniffed_xml_encoding
return xml_data, xml_encoding, sniffed_xml_encoding
-
def find_codec(self, charset):
return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \
- or (charset and self._codec(charset.replace("-", ""))) \
- or (charset and self._codec(charset.replace("-", "_"))) \
- or charset
+ or (charset and self._codec(charset.replace("-", ""))) \
+ or (charset and self._codec(charset.replace("-", "_"))) \
+ or charset
def _codec(self, charset):
- if not charset: return charset
+ if not charset:
+ return charset
codec = None
try:
codecs.lookup(charset)
@@ -1949,68 +1978,69 @@ class UnicodeDammit:
return codec
EBCDIC_TO_ASCII_MAP = None
+
def _ebcdic_to_ascii(self, s):
c = self.__class__
if not c.EBCDIC_TO_ASCII_MAP:
- emap = (0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15,
- 16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31,
- 128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7,
- 144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26,
- 32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33,
- 38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94,
- 45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63,
- 186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34,
- 195,97,98,99,100,101,102,103,104,105,196,197,198,199,200,
- 201,202,106,107,108,109,110,111,112,113,114,203,204,205,
- 206,207,208,209,126,115,116,117,118,119,120,121,122,210,
- 211,212,213,214,215,216,217,218,219,220,221,222,223,224,
- 225,226,227,228,229,230,231,123,65,66,67,68,69,70,71,72,
- 73,232,233,234,235,236,237,125,74,75,76,77,78,79,80,81,
- 82,238,239,240,241,242,243,92,159,83,84,85,86,87,88,89,
- 90,244,245,246,247,248,249,48,49,50,51,52,53,54,55,56,57,
- 250,251,252,253,254,255)
+ emap = (0, 1, 2, 3, 156, 9, 134, 127, 151, 141, 142, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 157, 133, 8, 135, 24, 25, 146, 143, 28, 29, 30, 31,
+ 128, 129, 130, 131, 132, 10, 23, 27, 136, 137, 138, 139, 140, 5, 6, 7,
+ 144, 145, 22, 147, 148, 149, 150, 4, 152, 153, 154, 155, 20, 21, 158, 26,
+ 32, 160, 161, 162, 163, 164, 165, 166, 167, 168, 91, 46, 60, 40, 43, 33,
+ 38, 169, 170, 171, 172, 173, 174, 175, 176, 177, 93, 36, 42, 41, 59, 94,
+ 45, 47, 178, 179, 180, 181, 182, 183, 184, 185, 124, 44, 37, 95, 62, 63,
+ 186, 187, 188, 189, 190, 191, 192, 193, 194, 96, 58, 35, 64, 39, 61, 34,
+ 195, 97, 98, 99, 100, 101, 102, 103, 104, 105, 196, 197, 198, 199, 200,
+ 201, 202, 106, 107, 108, 109, 110, 111, 112, 113, 114, 203, 204, 205,
+ 206, 207, 208, 209, 126, 115, 116, 117, 118, 119, 120, 121, 122, 210,
+ 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
+ 225, 226, 227, 228, 229, 230, 231, 123, 65, 66, 67, 68, 69, 70, 71, 72,
+ 73, 232, 233, 234, 235, 236, 237, 125, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 238, 239, 240, 241, 242, 243, 92, 159, 83, 84, 85, 86, 87, 88, 89,
+ 90, 244, 245, 246, 247, 248, 249, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 250, 251, 252, 253, 254, 255)
import string
- c.EBCDIC_TO_ASCII_MAP = string.maketrans( \
- ''.join(map(chr, range(256))), ''.join(map(chr, emap)))
+ c.EBCDIC_TO_ASCII_MAP = string.maketrans(
+ ''.join(map(chr, range(256))), ''.join(map(chr, emap)))
return s.translate(c.EBCDIC_TO_ASCII_MAP)
- MS_CHARS = { '\x80' : ('euro', '20AC'),
- '\x81' : ' ',
- '\x82' : ('sbquo', '201A'),
- '\x83' : ('fnof', '192'),
- '\x84' : ('bdquo', '201E'),
- '\x85' : ('hellip', '2026'),
- '\x86' : ('dagger', '2020'),
- '\x87' : ('Dagger', '2021'),
- '\x88' : ('circ', '2C6'),
- '\x89' : ('permil', '2030'),
- '\x8A' : ('Scaron', '160'),
- '\x8B' : ('lsaquo', '2039'),
- '\x8C' : ('OElig', '152'),
- '\x8D' : '?',
- '\x8E' : ('#x17D', '17D'),
- '\x8F' : '?',
- '\x90' : '?',
- '\x91' : ('lsquo', '2018'),
- '\x92' : ('rsquo', '2019'),
- '\x93' : ('ldquo', '201C'),
- '\x94' : ('rdquo', '201D'),
- '\x95' : ('bull', '2022'),
- '\x96' : ('ndash', '2013'),
- '\x97' : ('mdash', '2014'),
- '\x98' : ('tilde', '2DC'),
- '\x99' : ('trade', '2122'),
- '\x9a' : ('scaron', '161'),
- '\x9b' : ('rsaquo', '203A'),
- '\x9c' : ('oelig', '153'),
- '\x9d' : '?',
- '\x9e' : ('#x17E', '17E'),
- '\x9f' : ('Yuml', ''),}
+ MS_CHARS = {'\x80': ('euro', '20AC'),
+ '\x81': ' ',
+ '\x82': ('sbquo', '201A'),
+ '\x83': ('fnof', '192'),
+ '\x84': ('bdquo', '201E'),
+ '\x85': ('hellip', '2026'),
+ '\x86': ('dagger', '2020'),
+ '\x87': ('Dagger', '2021'),
+ '\x88': ('circ', '2C6'),
+ '\x89': ('permil', '2030'),
+ '\x8A': ('Scaron', '160'),
+ '\x8B': ('lsaquo', '2039'),
+ '\x8C': ('OElig', '152'),
+ '\x8D': '?',
+ '\x8E': ('#x17D', '17D'),
+ '\x8F': '?',
+ '\x90': '?',
+ '\x91': ('lsquo', '2018'),
+ '\x92': ('rsquo', '2019'),
+ '\x93': ('ldquo', '201C'),
+ '\x94': ('rdquo', '201D'),
+ '\x95': ('bull', '2022'),
+ '\x96': ('ndash', '2013'),
+ '\x97': ('mdash', '2014'),
+ '\x98': ('tilde', '2DC'),
+ '\x99': ('trade', '2122'),
+ '\x9a': ('scaron', '161'),
+ '\x9b': ('rsaquo', '203A'),
+ '\x9c': ('oelig', '153'),
+ '\x9d': '?',
+ '\x9e': ('#x17E', '17E'),
+ '\x9f': ('Yuml', ''), }
#######################################################################
-#By default, act as an HTML pretty-printer.
+# By default, act as an HTML pretty-printer.
if __name__ == '__main__':
import sys
soup = BeautifulSoup(sys.stdin)
diff --git a/cgi/anarkia.py b/cgi/anarkia.py
index 6b7e5fd..de9152f 100644
--- a/cgi/anarkia.py
+++ b/cgi/anarkia.py
@@ -10,430 +10,480 @@ from settings import Settings
d_thread = {}
d_post = {}
+
def anarkia(self, path_split):
- setBoard('anarkia')
-
- if len(path_split) <= 2:
- self.output = main()
- return
-
- raise UserError, 'Ya fue, baisano...'
-
- if path_split[2] == 'opt':
- self.output = boardoptions(self.formdata)
- elif path_split[2] == 'mod':
- self.output = mod(self.formdata)
- elif path_split[2] == 'bans':
- self.output = bans(self.formdata)
- elif path_split[2] == 'css':
- self.output = css(self.formdata)
- elif path_split[2] == 'type':
- self.output = type(self.formdata)
- elif path_split[2] == 'emojis':
- self.output = emojis(self.formdata)
- else:
- raise UserError, 'ke?'
+ setBoard('anarkia')
+
+ if len(path_split) <= 2:
+ self.output = main()
+ return
+
+ raise UserError, 'Ya fue, baisano...'
+
+ if path_split[2] == 'opt':
+ self.output = boardoptions(self.formdata)
+ elif path_split[2] == 'mod':
+ self.output = mod(self.formdata)
+ elif path_split[2] == 'bans':
+ self.output = bans(self.formdata)
+ elif path_split[2] == 'css':
+ self.output = css(self.formdata)
+ elif path_split[2] == 'type':
+ self.output = type(self.formdata)
+ elif path_split[2] == 'emojis':
+ self.output = emojis(self.formdata)
+ else:
+ raise UserError, 'ke?'
+
def main():
- board = Settings._.BOARD
-
- logs = FetchAll("SELECT * FROM `logs` WHERE `staff` = 'Anarko' ORDER BY `timestamp` DESC")
- for log in logs:
- log['timestamp_formatted'] = formatTimestamp(log['timestamp'])
-
- return renderTemplate('anarkia.html', {'mode': 0, 'logs': logs})
+ board = Settings._.BOARD
+
+ logs = FetchAll(
+ "SELECT * FROM `logs` WHERE `staff` = 'Anarko' ORDER BY `timestamp` DESC")
+ for log in logs:
+ log['timestamp_formatted'] = formatTimestamp(log['timestamp'])
+
+ return renderTemplate('anarkia.html', {'mode': 0, 'logs': logs})
+
def type(formdata):
- board = Settings._.BOARD
-
- if board['board_type'] == '1':
- (type_now, type_do, do_num) = ('BBS', 'IB', '0')
- else:
- (type_now, type_do, do_num) = ('IB', 'BBS', '1')
-
- if formdata.get('transform') == 'do':
- t = 0
- try:
- with open('anarkia_time') as f:
- t = int(f.read())
- except IOError:
- pass
-
- dif = time.time() - t
- if dif > (10 * 60):
- #if True:
- import re
- t = time.time()
-
- board['board_type'] = do_num
- board['force_css'] = Settings.HOME_URL + 'anarkia/style_' + type_do.lower() + '.css'
- updateBoardSettings()
-
- # update posts
- fix_board()
-
- # regenerate
- setBoard('anarkia')
- regenerateBoard(True)
-
- tf = timeTaken(t, time.time())
-
- with open('anarkia_time', 'w') as f:
- t = f.write(str(int(time.time())))
-
- msg = 'Cambiada estructura de sección a %s. (%s)' % (type_do, tf)
- logAction(msg)
- return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
+ board = Settings._.BOARD
+
+ if board['board_type'] == '1':
+ (type_now, type_do, do_num) = ('BBS', 'IB', '0')
else:
- raise UserError, 'Esta acción sólo se puede realizar cada 10 minutos. Faltan: %d mins.' % (10-int(dif/60))
-
- return renderTemplate('anarkia.html', {'mode': 7, 'type_now': type_now, 'type_do': type_do})
+ (type_now, type_do, do_num) = ('IB', 'BBS', '1')
+
+ if formdata.get('transform') == 'do':
+ t = 0
+ try:
+ with open('anarkia_time') as f:
+ t = int(f.read())
+ except IOError:
+ pass
+
+ dif = time.time() - t
+ if dif > (10 * 60):
+ # if True:
+ import re
+ t = time.time()
+
+ board['board_type'] = do_num
+ board['force_css'] = Settings.HOME_URL + \
+ 'anarkia/style_' + type_do.lower() + '.css'
+ updateBoardSettings()
+
+ # update posts
+ fix_board()
+
+ # regenerate
+ setBoard('anarkia')
+ regenerateBoard(True)
+
+ tf = timeTaken(t, time.time())
+
+ with open('anarkia_time', 'w') as f:
+ t = f.write(str(int(time.time())))
+
+ msg = 'Cambiada estructura de sección a %s. (%s)' % (type_do, tf)
+ logAction(msg)
+ return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
+ else:
+ raise UserError, 'Esta acción sólo se puede realizar cada 10 minutos. Faltan: %d mins.' % (10-int(dif/60))
+
+ return renderTemplate('anarkia.html', {'mode': 7, 'type_now': type_now, 'type_do': type_do})
+
def fix_board():
- board = Settings._.BOARD
- get_fix_dictionary()
-
- if board['board_type'] == '1':
- to_fix = FetchAll("SELECT * FROM posts WHERE message LIKE '%%anarkia/res/%%' AND boardid = %s" % board['id'])
- else:
- to_fix = FetchAll("SELECT * FROM posts WHERE message LIKE '%%anarkia/read/%%' AND boardid = %s" % board['id'])
-
- for p in to_fix:
- try:
- if board['board_type'] == '1':
- newmessage = re.sub(r'/anarkia/res/(\d+).html#(\d+)">&gt;&gt;(\d+)', fix_to_bbs, p['message'])
- else:
- newmessage = re.sub(r'/anarkia/read/(\d+)/(\d+)">&gt;&gt;(\d+)', fix_to_ib, p['message'])
-
- UpdateDb("UPDATE posts SET message = '%s' WHERE boardid = %s AND id = %s" % \
- (_mysql.escape_string(newmessage), board['id'], p['id']))
- except KeyError:
- pass
-
- return True
+ board = Settings._.BOARD
+ get_fix_dictionary()
+
+ if board['board_type'] == '1':
+ to_fix = FetchAll(
+ "SELECT * FROM posts WHERE message LIKE '%%anarkia/res/%%' AND boardid = %s" % board['id'])
+ else:
+ to_fix = FetchAll(
+ "SELECT * FROM posts WHERE message LIKE '%%anarkia/read/%%' AND boardid = %s" % board['id'])
+
+ for p in to_fix:
+ try:
+ if board['board_type'] == '1':
+ newmessage = re.sub(
+ r'/anarkia/res/(\d+).html#(\d+)">&gt;&gt;(\d+)', fix_to_bbs, p['message'])
+ else:
+ newmessage = re.sub(
+ r'/anarkia/read/(\d+)/(\d+)">&gt;&gt;(\d+)', fix_to_ib, p['message'])
+
+ UpdateDb("UPDATE posts SET message = '%s' WHERE boardid = %s AND id = %s" %
+ (_mysql.escape_string(newmessage), board['id'], p['id']))
+ except KeyError:
+ pass
+
+ return True
+
def fix_to_bbs(matchobj):
- threadid = matchobj.group(1)
- pid = matchobj.group(2)
- new_thread = d_thread[threadid]
- new_post = d_post[new_thread][pid]
- return '/anarkia/read/%s/%s">&gt;&gt;%s' % (new_thread, new_post, new_post)
-
+ threadid = matchobj.group(1)
+ pid = matchobj.group(2)
+ new_thread = d_thread[threadid]
+ new_post = d_post[new_thread][pid]
+ return '/anarkia/read/%s/%s">&gt;&gt;%s' % (new_thread, new_post, new_post)
+
+
def fix_to_ib(matchobj):
- threadid = matchobj.group(1)
- num = int(matchobj.group(2))
- new_thread = d_thread[threadid]
- new_post = d_post[new_thread][num]
- return '/anarkia/res/%s.html#%s">&gt;&gt;%s' % (new_thread, new_post, new_post)
-
+ threadid = matchobj.group(1)
+ num = int(matchobj.group(2))
+ new_thread = d_thread[threadid]
+ new_post = d_post[new_thread][num]
+ return '/anarkia/res/%s.html#%s">&gt;&gt;%s' % (new_thread, new_post, new_post)
+
+
def get_fix_dictionary():
- global d_thread, d_post
- board = Settings._.BOARD
- res = FetchAll("SELECT id, timestamp, parentid FROM posts WHERE boardid = %s ORDER BY CASE parentid WHEN 0 THEN id ELSE parentid END ASC, `id` ASC" % board['id'])
- num = 1
- thread = 0
- for p in res:
- pid = p['id']
- if p['parentid'] == '0':
- num = 1
-
- time = p['timestamp']
- if board['board_type'] == '1':
- d_thread[pid] = time
- thread = time
- else:
- d_thread[time] = pid
- thread = pid
-
- d_post[thread] = {}
-
+ global d_thread, d_post
+ board = Settings._.BOARD
+ res = FetchAll(
+ "SELECT id, timestamp, parentid FROM posts WHERE boardid = %s ORDER BY CASE parentid WHEN 0 THEN id ELSE parentid END ASC, `id` ASC" % board['id'])
+ num = 1
+ thread = 0
+ for p in res:
+ pid = p['id']
+ if p['parentid'] == '0':
+ num = 1
+
+ time = p['timestamp']
+ if board['board_type'] == '1':
+ d_thread[pid] = time
+ thread = time
+ else:
+ d_thread[time] = pid
+ thread = pid
+
+ d_post[thread] = {}
+
+ if board['board_type'] == '1':
+ d_post[thread][pid] = num
+ else:
+ d_post[thread][num] = pid
+ num += 1
+
+ return
+
+
+def css(formdata):
+ board = Settings._.BOARD
+
if board['board_type'] == '1':
- d_post[thread][pid] = num
+ basename = 'style_bbs.css'
else:
- d_post[thread][num] = pid
- num += 1
-
- return
-
-def css(formdata):
- board = Settings._.BOARD
-
- if board['board_type'] == '1':
- basename = 'style_bbs.css'
- else:
- basename = 'style_ib.css'
-
- fname = '%sanarkia/%s' % (Settings.HOME_DIR, basename)
-
- if formdata.get('cssfile'):
- with open(fname, 'w') as f:
- cssfile = f.write(formdata['cssfile'])
-
- msg = 'CSS actualizado.'
- logAction(msg)
- return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
-
- with open(fname) as f:
- cssfile = f.read()
-
- return renderTemplate('anarkia.html', {'mode': 6, 'basename': basename, 'cssfile': cssfile})
-
+ basename = 'style_ib.css'
+
+ fname = '%sanarkia/%s' % (Settings.HOME_DIR, basename)
+
+ if formdata.get('cssfile'):
+ with open(fname, 'w') as f:
+ cssfile = f.write(formdata['cssfile'])
+
+ msg = 'CSS actualizado.'
+ logAction(msg)
+ return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
+
+ with open(fname) as f:
+ cssfile = f.read()
+
+ return renderTemplate('anarkia.html', {'mode': 6, 'basename': basename, 'cssfile': cssfile})
+
+
def bans(formdata):
- board = Settings._.BOARD
-
- if formdata.get('unban'):
- unban = int(formdata['unban'])
- boardpickle = pickle.dumps(['anarkia'])
-
- ban = FetchOne("SELECT * FROM `bans` WHERE id = %d" % unban)
- if not ban:
- raise UserError, "Ban inválido."
- if ban['boards'] != boardpickle:
- raise USerError, "Ban inválido."
-
- UpdateDb('DELETE FROM `bans` WHERE id = %s' % ban['id'])
- logAction("Usuario %s desbaneado." % ban['ip'][:4])
- regenerateAccess()
-
- bans = FetchAll('SELECT * FROM `bans` WHERE staff = \'anarko\'')
- for ban in bans:
- ban['added'] = formatTimestamp(ban['added'])
- if ban['until'] == '0':
- ban['until'] = _('Does not expire')
- else:
- ban['until'] = formatTimestamp(ban['until'])
- return renderTemplate('anarkia.html', {'mode': 5, 'bans': bans})
-
+ board = Settings._.BOARD
+
+ if formdata.get('unban'):
+ unban = int(formdata['unban'])
+ boardpickle = pickle.dumps(['anarkia'])
+
+ ban = FetchOne("SELECT * FROM `bans` WHERE id = %d" % unban)
+ if not ban:
+ raise UserError, "Ban inválido."
+ if ban['boards'] != boardpickle:
+ raise USerError, "Ban inválido."
+
+ UpdateDb('DELETE FROM `bans` WHERE id = %s' % ban['id'])
+ logAction("Usuario %s desbaneado." % ban['ip'][:4])
+ regenerateAccess()
+
+ bans = FetchAll('SELECT * FROM `bans` WHERE staff = \'anarko\'')
+ for ban in bans:
+ ban['added'] = formatTimestamp(ban['added'])
+ if ban['until'] == '0':
+ ban['until'] = _('Does not expire')
+ else:
+ ban['until'] = formatTimestamp(ban['until'])
+ return renderTemplate('anarkia.html', {'mode': 5, 'bans': bans})
+
+
def mod(formdata):
- board = Settings._.BOARD
-
- if formdata.get('thread'):
- parentid = int(formdata['thread'])
- posts = FetchAll('SELECT * FROM `posts` WHERE (parentid = %d OR id = %d) AND boardid = %s ORDER BY `id` ASC' % (parentid, parentid, board['id']))
- return renderTemplate('anarkia.html', {'mode': 3, 'posts': posts})
- elif formdata.get('lock'):
- postid = int(formdata['lock'])
- post = FetchOne('SELECT id, locked FROM posts WHERE boardid = %s AND id = %d AND parentid = 0 LIMIT 1' % (board['id'], postid))
- if post['locked'] == '0':
- setLocked = 1
- msg = "Hilo %s cerrado." % post['id']
- else:
- setLocked = 0
- msg = "Hilo %s abierto." % post['id']
-
- UpdateDb("UPDATE `posts` SET `locked` = %d WHERE `boardid` = '%s' AND `id` = '%s' LIMIT 1" % (setLocked, board["id"], post["id"]))
- threadUpdated(post['id'])
- logAction(msg)
- return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
- elif formdata.get('del'):
- postid = int(formdata['del'])
- post = FetchOne('SELECT id, parentid FROM posts WHERE boardid = %s AND id = %d LIMIT 1' % (board['id'], postid))
- if post['parentid'] != '0':
- deletePost(post['id'], None, '3', False)
- msg = "Mensaje %s eliminado." % post['id']
- logAction(msg)
- return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
- else:
- raise UserError, "jaj no"
- elif formdata.get('restore'):
- postid = int(formdata['restore'])
- post = FetchOne('SELECT id, parentid FROM posts WHERE boardid = %s AND id = %d LIMIT 1' % (board['id'], postid))
-
- UpdateDb('UPDATE `posts` SET `IS_DELETED` = 0 WHERE `boardid` = %s AND `id` = %s LIMIT 1' % (board['id'], post['id']))
- if post['parentid'] != '0':
- threadUpdated(post['parentid'])
+ board = Settings._.BOARD
+
+ if formdata.get('thread'):
+ parentid = int(formdata['thread'])
+ posts = FetchAll('SELECT * FROM `posts` WHERE (parentid = %d OR id = %d) AND boardid = %s ORDER BY `id` ASC' %
+ (parentid, parentid, board['id']))
+ return renderTemplate('anarkia.html', {'mode': 3, 'posts': posts})
+ elif formdata.get('lock'):
+ postid = int(formdata['lock'])
+ post = FetchOne('SELECT id, locked FROM posts WHERE boardid = %s AND id = %d AND parentid = 0 LIMIT 1' % (
+ board['id'], postid))
+ if post['locked'] == '0':
+ setLocked = 1
+ msg = "Hilo %s cerrado." % post['id']
+ else:
+ setLocked = 0
+ msg = "Hilo %s abierto." % post['id']
+
+ UpdateDb("UPDATE `posts` SET `locked` = %d WHERE `boardid` = '%s' AND `id` = '%s' LIMIT 1" % (
+ setLocked, board["id"], post["id"]))
+ threadUpdated(post['id'])
+ logAction(msg)
+ return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
+ elif formdata.get('del'):
+ postid = int(formdata['del'])
+ post = FetchOne('SELECT id, parentid FROM posts WHERE boardid = %s AND id = %d LIMIT 1' % (
+ board['id'], postid))
+ if post['parentid'] != '0':
+ deletePost(post['id'], None, '3', False)
+ msg = "Mensaje %s eliminado." % post['id']
+ logAction(msg)
+ return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
+ else:
+ raise UserError, "jaj no"
+ elif formdata.get('restore'):
+ postid = int(formdata['restore'])
+ post = FetchOne('SELECT id, parentid FROM posts WHERE boardid = %s AND id = %d LIMIT 1' % (
+ board['id'], postid))
+
+ UpdateDb('UPDATE `posts` SET `IS_DELETED` = 0 WHERE `boardid` = %s AND `id` = %s LIMIT 1' % (
+ board['id'], post['id']))
+ if post['parentid'] != '0':
+ threadUpdated(post['parentid'])
+ else:
+ regenerateFrontPages()
+ msg = "Mensaje %s recuperado." % post['id']
+ logAction(msg)
+ return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
+ elif formdata.get('ban'):
+ postid = int(formdata['ban'])
+ post = FetchOne('SELECT id, ip FROM posts WHERE boardid = %s AND id = %d LIMIT 1' % (
+ board['id'], postid))
+
+ return renderTemplate('anarkia.html', {'mode': 4, 'post': post})
+ elif formdata.get('banto'):
+ postid = int(formdata['banto'])
+ post = FetchOne('SELECT id, message, parentid, ip FROM posts WHERE boardid = %s AND id = %d LIMIT 1' % (
+ board['id'], postid))
+
+ reason = formdata.get('reason').replace(
+ 'script', '').replace('meta', '')
+ if reason is not None:
+ if formdata['seconds'] != '0':
+ until = str(timestamp() + int(formdata['seconds']))
+ else:
+ until = '0'
+ where = pickle.dumps(['anarkia'])
+
+ ban = FetchOne("SELECT `id` FROM `bans` WHERE `ip` = '" +
+ post['ip'] + "' AND `boards` = '" + _mysql.escape_string(where) + "' LIMIT 1")
+ if ban:
+ raise UserError, "Este usuario ya esta baneado."
+
+ # Blind mode
+ if formdata.get('blind') == '1':
+ blind = '1'
+ else:
+ blind = '0'
+
+ InsertDb("INSERT INTO `bans` (`ip`, `netmask`, `boards`, `added`, `until`, `staff`, `reason`, `blind`) VALUES ('" + post['ip'] + "', INET_ATON('255.255.255.255'), '" + _mysql.escape_string(
+ where) + "', " + str(timestamp()) + ", " + until + ", 'anarko', '" + _mysql.escape_string(formdata['reason']) + "', '"+blind+"')")
+
+ newmessage = post['message'] + \
+ '<hr /><span class="banned">A este usuario se le revocó el acceso. Razón: %s</span>' % reason
+
+ UpdateDb("UPDATE posts SET message = '%s' WHERE boardid = %s AND id = %s" % (
+ _mysql.escape_string(newmessage), board['id'], post['id']))
+ if post['parentid'] != '0':
+ threadUpdated(post['parentid'])
+ else:
+ regenerateFrontPages()
+ regenerateAccess()
+
+ msg = "Usuario %s baneado." % post['ip'][:4]
+ logAction(msg)
+ return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
else:
- regenerateFrontPages()
- msg = "Mensaje %s recuperado." % post['id']
- logAction(msg)
- return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
- elif formdata.get('ban'):
- postid = int(formdata['ban'])
- post = FetchOne('SELECT id, ip FROM posts WHERE boardid = %s AND id = %d LIMIT 1' % (board['id'], postid))
-
- return renderTemplate('anarkia.html', {'mode': 4, 'post': post})
- elif formdata.get('banto'):
- postid = int(formdata['banto'])
- post = FetchOne('SELECT id, message, parentid, ip FROM posts WHERE boardid = %s AND id = %d LIMIT 1' % (board['id'], postid))
-
- reason = formdata.get('reason').replace('script', '').replace('meta', '')
- if reason is not None:
- if formdata['seconds'] != '0':
- until = str(timestamp() + int(formdata['seconds']))
- else:
- until = '0'
- where = pickle.dumps(['anarkia'])
-
- ban = FetchOne("SELECT `id` FROM `bans` WHERE `ip` = '" + post['ip'] + "' AND `boards` = '" + _mysql.escape_string(where) + "' LIMIT 1")
- if ban:
- raise UserError, "Este usuario ya esta baneado."
-
- # Blind mode
- if formdata.get('blind') == '1':
- blind = '1'
- else:
- blind = '0'
-
- InsertDb("INSERT INTO `bans` (`ip`, `netmask`, `boards`, `added`, `until`, `staff`, `reason`, `blind`) VALUES ('" + post['ip'] + "', INET_ATON('255.255.255.255'), '" + _mysql.escape_string(where) + "', " + str(timestamp()) + ", " + until + ", 'anarko', '" + _mysql.escape_string(formdata['reason']) + "', '"+blind+"')")
-
- newmessage = post['message'] + '<hr /><span class="banned">A este usuario se le revocó el acceso. Razón: %s</span>' % reason
-
- UpdateDb("UPDATE posts SET message = '%s' WHERE boardid = %s AND id = %s" % (_mysql.escape_string(newmessage), board['id'], post['id']))
- if post['parentid'] != '0':
- threadUpdated(post['parentid'])
- else:
- regenerateFrontPages()
- regenerateAccess()
-
- msg = "Usuario %s baneado." % post['ip'][:4]
- logAction(msg)
- return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
- else:
- reports = FetchAll("SELECT * FROM `reports` WHERE board = 'anarkia'")
- threads = FetchAll('SELECT * FROM `posts` WHERE boardid = %s AND parentid = 0 ORDER BY `bumped` DESC' % board['id'])
- return renderTemplate('anarkia.html', {'mode': 2, 'threads': threads, 'reports': reports})
-
+ reports = FetchAll("SELECT * FROM `reports` WHERE board = 'anarkia'")
+ threads = FetchAll(
+ 'SELECT * FROM `posts` WHERE boardid = %s AND parentid = 0 ORDER BY `bumped` DESC' % board['id'])
+ return renderTemplate('anarkia.html', {'mode': 2, 'threads': threads, 'reports': reports})
+
+
def boardoptions(formdata):
- board = Settings._.BOARD
-
- if formdata.get('longname'):
- # submitted
- board['longname'] = formdata['longname'].replace('script', '')
- board['postarea_desc'] = formdata['postarea_desc'].replace('script', '').replace('meta', '')
- board['postarea_extra'] = formdata['postarea_extra'].replace('script', '').replace('meta', '')
- board['anonymous'] = formdata['anonymous'].replace('script', '')
- board['subject'] = formdata['subject'].replace('script', '')
- board['message'] = formdata['message'].replace('script', '')
- board['useid'] = formdata['useid']
- if 'disable_name' in formdata.keys():
- board['disable_name'] = '1'
- else:
- board['disable_name'] = '0'
- if 'disable_subject' in formdata.keys():
- board['disable_subject'] = '1'
- else:
- board['disable_subject'] = '0'
- if 'allow_noimage' in formdata.keys():
- board['allow_noimage'] = '1'
- else:
- board['allow_noimage'] = '0'
- if 'allow_images' in formdata.keys():
- board['allow_images'] = '1'
- else:
- board['allow_images'] = '0'
- if 'allow_image_replies' in formdata.keys():
- board['allow_image_replies'] = '1'
+ board = Settings._.BOARD
+
+ if formdata.get('longname'):
+ # submitted
+ board['longname'] = formdata['longname'].replace('script', '')
+ board['postarea_desc'] = formdata['postarea_desc'].replace(
+ 'script', '').replace('meta', '')
+ board['postarea_extra'] = formdata['postarea_extra'].replace(
+ 'script', '').replace('meta', '')
+ board['anonymous'] = formdata['anonymous'].replace('script', '')
+ board['subject'] = formdata['subject'].replace('script', '')
+ board['message'] = formdata['message'].replace('script', '')
+ board['useid'] = formdata['useid']
+ if 'disable_name' in formdata.keys():
+ board['disable_name'] = '1'
+ else:
+ board['disable_name'] = '0'
+ if 'disable_subject' in formdata.keys():
+ board['disable_subject'] = '1'
+ else:
+ board['disable_subject'] = '0'
+ if 'allow_noimage' in formdata.keys():
+ board['allow_noimage'] = '1'
+ else:
+ board['allow_noimage'] = '0'
+ if 'allow_images' in formdata.keys():
+ board['allow_images'] = '1'
+ else:
+ board['allow_images'] = '0'
+ if 'allow_image_replies' in formdata.keys():
+ board['allow_image_replies'] = '1'
+ else:
+ board['allow_image_replies'] = '0'
+
+ # Update file types
+ UpdateDb("DELETE FROM `boards_filetypes` WHERE `boardid` = %s" %
+ board['id'])
+ for filetype in filetypelist():
+ if 'filetype'+filetype['ext'] in formdata.keys():
+ UpdateDb("INSERT INTO `boards_filetypes` VALUES (%s, %s)" %
+ (board['id'], filetype['id']))
+
+ try:
+ board['maxsize'] = int(formdata['maxsize'])
+ if board['maxsize'] > 10000:
+ board['maxsize'] = 10000
+ except:
+ raise UserError, _("Max size must be numeric.")
+
+ try:
+ board['thumb_px'] = int(formdata['thumb_px'])
+ if board['thumb_px'] > 500:
+ board['thumb_px'] = 500
+ except:
+ raise UserError, _("Max thumb dimensions must be numeric.")
+
+ try:
+ board['numthreads'] = int(formdata['numthreads'])
+ if board['numthreads'] > 15:
+ board['numthreads'] = 15
+ except:
+ raise UserError, _("Max threads shown must be numeric.")
+
+ try:
+ board['numcont'] = int(formdata['numcont'])
+ if board['numcont'] > 15:
+ board['numcont'] = 15
+ except:
+ raise UserError, _("Max replies shown must be numeric.")
+
+ t = time.time()
+ updateBoardSettings()
+ setBoard('anarkia')
+ regenerateBoard(True)
+ tf = timeTaken(t, time.time())
+
+ msg = 'Opciones cambiadas. %s' % tf
+ logAction(msg)
+ return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
else:
- board['allow_image_replies'] = '0'
-
- # Update file types
- UpdateDb("DELETE FROM `boards_filetypes` WHERE `boardid` = %s" % board['id'])
- for filetype in filetypelist():
- if 'filetype'+filetype['ext'] in formdata.keys():
- UpdateDb("INSERT INTO `boards_filetypes` VALUES (%s, %s)" % (board['id'], filetype['id']))
-
- try:
- board['maxsize'] = int(formdata['maxsize'])
- if board['maxsize'] > 10000:
- board['maxsize'] = 10000
- except:
- raise UserError, _("Max size must be numeric.")
-
- try:
- board['thumb_px'] = int(formdata['thumb_px'])
- if board['thumb_px'] > 500:
- board['thumb_px'] = 500
- except:
- raise UserError, _("Max thumb dimensions must be numeric.")
-
- try:
- board['numthreads'] = int(formdata['numthreads'])
- if board['numthreads'] > 15:
- board['numthreads'] = 15
- except:
- raise UserError, _("Max threads shown must be numeric.")
-
- try:
- board['numcont'] = int(formdata['numcont'])
- if board['numcont'] > 15:
- board['numcont'] = 15
- except:
- raise UserError, _("Max replies shown must be numeric.")
-
- t = time.time()
- updateBoardSettings()
- setBoard('anarkia')
- regenerateBoard(True)
- tf = timeTaken(t, time.time())
-
- msg = 'Opciones cambiadas. %s' % tf
- logAction(msg)
- return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
- else:
- return renderTemplate('anarkia.html', {'mode': 1, 'boardopts': board, 'filetypes': filetypelist(), 'supported_filetypes': board['filetypes_ext']})
+ return renderTemplate('anarkia.html', {'mode': 1, 'boardopts': board, 'filetypes': filetypelist(), 'supported_filetypes': board['filetypes_ext']})
+
def emojis(formdata):
- board = Settings._.BOARD
- board_pickle = _mysql.escape_string(pickle.dumps([board['dir']]))
-
- if formdata.get('new'):
- import re
- ext = {'image/jpeg': 'jpg', 'image/gif': 'gif', 'image/png': 'png'}
-
- if not formdata['name']:
- raise UserError, 'Ingresa nombre.'
- if not re.match(r"^[0-9a-zA-Z]+$", formdata['name']):
- raise UserError, 'Nombre inválido; solo letras/números.'
-
- name = ":%s:" % formdata['name'][:15]
- data = formdata['file']
-
- if not data:
- raise UserError, 'Ingresa imagen.'
-
- # check if it exists
- already = FetchOne("SELECT 1 FROM `filters` WHERE `boards` = '%s' AND `from` = '%s'" % (board_pickle, _mysql.escape_string(name)))
- if already:
- raise UserError, 'Este emoji ya existe.'
-
- # get image information
- content_type, width, height, size, extra = getImageInfo(data)
-
- if content_type not in ext.keys():
- raise UserError, 'Formato inválido.'
- if width > 500 or height > 500:
- raise UserError, 'Dimensiones muy altas.'
- if size > 150000:
- raise UserError, 'Tamaño muy grande.'
-
- # create file names
- thumb_width, thumb_height = getThumbDimensions(width, height, 32)
-
- file_path = Settings.ROOT_DIR + board["dir"] + "/e/" + formdata['name'][:15] + '.' + ext[content_type]
- file_url = Settings.BOARDS_URL + board["dir"] + "/e/" + formdata['name'][:15] + '.' + ext[content_type]
- to_filter = '<img src="%s" width="%d" height="%d" />' % (file_url, thumb_width, thumb_height)
-
- # start processing image
- args = [Settings.CONVERT_PATH, "-", "-limit" , "thread", "1", "-resize", "%dx%d" % (thumb_width, thumb_height), "-quality", "80", file_path]
- p = subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
- out = p.communicate(input=data)[0]
-
- # insert into DB
- sql = "INSERT INTO `filters` (`boards`, `type`, `action`, `from`, `to`, `staff`, `added`) VALUES ('%s', 0, 1, '%s', '%s', 'Anarko', '%s')" % (board_pickle, _mysql.escape_string(name), _mysql.escape_string(to_filter), timestamp())
- UpdateDb(sql)
-
- msg = "Emoji %s agregado." % name
- logAction(msg)
- return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
- elif formdata.get('del'):
- return renderTemplate('anarkia.html', {'mode': 99, 'msg': 'Del.'})
- else:
- filters = FetchAll("SELECT * FROM `filters` WHERE `boards` = '%s' ORDER BY `added` DESC" % board_pickle)
- return renderTemplate('anarkia.html', {'mode': 8, 'emojis': filters})
+ board = Settings._.BOARD
+ board_pickle = _mysql.escape_string(pickle.dumps([board['dir']]))
+
+ if formdata.get('new'):
+ import re
+ ext = {'image/jpeg': 'jpg', 'image/gif': 'gif', 'image/png': 'png'}
+
+ if not formdata['name']:
+ raise UserError, 'Ingresa nombre.'
+ if not re.match(r"^[0-9a-zA-Z]+$", formdata['name']):
+ raise UserError, 'Nombre inválido; solo letras/números.'
+
+ name = ":%s:" % formdata['name'][:15]
+ data = formdata['file']
+
+ if not data:
+ raise UserError, 'Ingresa imagen.'
+
+ # check if it exists
+ already = FetchOne("SELECT 1 FROM `filters` WHERE `boards` = '%s' AND `from` = '%s'" % (
+ board_pickle, _mysql.escape_string(name)))
+ if already:
+ raise UserError, 'Este emoji ya existe.'
+
+ # get image information
+ content_type, width, height, size, extra = getImageInfo(data)
+
+ if content_type not in ext.keys():
+ raise UserError, 'Formato inválido.'
+ if width > 500 or height > 500:
+ raise UserError, 'Dimensiones muy altas.'
+ if size > 150000:
+ raise UserError, 'Tamaño muy grande.'
+
+ # create file names
+ thumb_width, thumb_height = getThumbDimensions(width, height, 32)
+
+ file_path = Settings.ROOT_DIR + \
+ board["dir"] + "/e/" + formdata['name'][:15] + \
+ '.' + ext[content_type]
+ file_url = Settings.BOARDS_URL + \
+ board["dir"] + "/e/" + formdata['name'][:15] + \
+ '.' + ext[content_type]
+ to_filter = '<img src="%s" width="%d" height="%d" />' % (
+ file_url, thumb_width, thumb_height)
+
+ # start processing image
+ args = [Settings.CONVERT_PATH, "-", "-limit", "thread", "1", "-resize",
+ "%dx%d" % (thumb_width, thumb_height), "-quality", "80", file_path]
+ p = subprocess.Popen(args, stdout=subprocess.PIPE,
+ stdin=subprocess.PIPE, stderr=subprocess.PIPE)
+ out = p.communicate(input=data)[0]
+
+ # insert into DB
+ sql = "INSERT INTO `filters` (`boards`, `type`, `action`, `from`, `to`, `staff`, `added`) VALUES ('%s', 0, 1, '%s', '%s', 'Anarko', '%s')" % (
+ board_pickle, _mysql.escape_string(name), _mysql.escape_string(to_filter), timestamp())
+ UpdateDb(sql)
+
+ msg = "Emoji %s agregado." % name
+ logAction(msg)
+ return renderTemplate('anarkia.html', {'mode': 99, 'msg': msg})
+ elif formdata.get('del'):
+ return renderTemplate('anarkia.html', {'mode': 99, 'msg': 'Del.'})
+ else:
+ filters = FetchAll(
+ "SELECT * FROM `filters` WHERE `boards` = '%s' ORDER BY `added` DESC" % board_pickle)
+ return renderTemplate('anarkia.html', {'mode': 8, 'emojis': filters})
+
def filetypelist():
- filetypes = FetchAll('SELECT * FROM `filetypes` ORDER BY `ext` ASC')
- return filetypes
+ filetypes = FetchAll('SELECT * FROM `filetypes` ORDER BY `ext` ASC')
+ return filetypes
+
def logAction(action):
-    InsertDb("INSERT INTO `logs` (`timestamp`, `staff`, `action`) VALUES (" + str(timestamp()) + ", 'Anarko', '" + _mysql.escape_string(action) + "')")
\ No newline at end of file
+ InsertDb("INSERT INTO `logs` (`timestamp`, `staff`, `action`) VALUES (" +
+ str(timestamp()) + ", 'Anarko', '" + _mysql.escape_string(action) + "')")
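
For reference, and not part of the commit above: the queries in anarkia.py, such as the ban INSERT, are assembled by concatenating request values into the SQL string and escaping each piece with _mysql.escape_string(). The sketch below shows the same statement written with MySQLdb's cursor placeholders, where the driver quotes every value itself; the connection credentials and the post_ip / where / until / reason / blind variables are made-up stand-ins for the data computed in the handler above.

# Sketch only, not part of this commit: the ban INSERT with driver-side escaping.
import time
import pickle
import MySQLdb

conn = MySQLdb.connect(host="localhost", user="weabot",
                       passwd="secret", db="weabot")   # hypothetical credentials
post_ip = "192.0.2.1"                  # placeholder for post['ip']
where = pickle.dumps(['anarkia'])      # board list, as built in the handler
until = 0                              # permanent ban (the seconds == '0' case)
reason = "ejemplo"                     # placeholder for formdata['reason']
blind = '0'

cur = conn.cursor()
cur.execute("INSERT INTO `bans` (`ip`, `netmask`, `boards`, `added`, `until`, `staff`, `reason`, `blind`) "
            "VALUES (%s, INET_ATON('255.255.255.255'), %s, %s, %s, 'anarko', %s, %s)",
            (post_ip, where, int(time.time()), until, reason, blind))
conn.commit()
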
diff --git a/cgi/api.py b/cgi/api.py
index 828dff1..6861809 100644
--- a/cgi/api.py
+++ b/cgi/api.py
@@ -7,417 +7,435 @@ from framework import *
from database import *
from post import *
+
def api(self, path_split):
- if len(path_split) > 2:
- try:
- self.output = api_process(self, path_split)
- except APIError, e:
- self.output = api_error("error", e.message)
- except UserError, e:
- self.output = api_error("failed", e.message)
- except Exception, e:
- import sys, traceback
- exc_type, exc_value, exc_traceback = sys.exc_info()
- detail = ["%s : %s : %s : %s" % (os.path.basename(o[0]),o[1],o[2],o[3]) for o in traceback.extract_tb(exc_traceback)]
-
- self.output = api_error("exception", str(e), str(type(e)), detail)
- else:
- self.output = api_error("error", "No method specified")
+ if len(path_split) > 2:
+ try:
+ self.output = api_process(self, path_split)
+ except APIError, e:
+ self.output = api_error("error", e.message)
+ except UserError, e:
+ self.output = api_error("failed", e.message)
+ except Exception, e:
+ import sys
+ import traceback
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ detail = ["%s : %s : %s : %s" % (os.path.basename(
+ o[0]), o[1], o[2], o[3]) for o in traceback.extract_tb(exc_traceback)]
-def api_process(self, path_split):
- formdata = self.formdata
- ip = self.environ["REMOTE_ADDR"]
- t = time.time()
- method = path_split[2]
-
- #bans = ['181.72.116.62']
- bans = []
- if ip in bans:
- raise APIError, "You have been blacklisted."
-
- #with open('../api_log.txt', 'a') as f:
- # logstr = "[%s] %s: %s\n" % (formatTimestamp(t), ip, repr(path_split))
- # f.write(logstr)
-
- values = {'state': 'success'}
-
- if method == 'boards':
- boards = FetchAll('SELECT dir, name, board_type, allow_images, allow_image_replies, maxsize FROM `boards` WHERE `secret`=0 ORDER BY `sort` ASC')
- values['boards'] = boards
- for board in values['boards']:
- board['board_type'] = int(board['board_type'])
- board['allow_images'] = int(board['allow_images'])
- board['allow_image_replies'] = int(board['allow_image_replies'])
- board['maxsize'] = int(board['maxsize'])
-
- elif method == 'last':
- data_limit = formdata.get('limit')
- data_since = formdata.get('since')
-
- limit = 10
- since = 0
-
- if data_limit:
- try:
- limit = int(data_limit)
- except ValueError:
- raise APIError, "Limit must be numeric"
-
- if data_since:
- try:
- since = int(data_since)
- except ValueError:
- raise APIError, "Since must be numeric"
-
- if limit > 50:
- raise APIError, "Maximum limit is 50"
-
- sql = "SELECT posts.id, boards.dir, timestamp, timestamp_formatted, posts.name, tripcode, email, posts.subject, posts.message, file, file_size, image_height, image_width, thumb, thumb_width, thumb_height, parentid FROM posts INNER JOIN boards ON boardid = boards.id WHERE timestamp > %d AND IS_DELETED = 0 AND boards.secret = 0 ORDER BY timestamp DESC LIMIT %d" % (since, limit)
- values['posts'] = FetchAll(sql)
-
- for post in values['posts']:
- post['id'] = int(post['id'])
- post['timestamp'] = int(post['timestamp'])
- post['parentid'] = int(post['parentid'])
- post['file_size'] = int(post['file_size'])
- post['image_width'] = int(post['image_width'])
- post['image_height'] = int(post['image_height'])
- post['thumb_width'] = int(post['thumb_width'])
- post['thumb_height'] = int(post['thumb_height'])
- post['message'] = post['message'].decode('utf-8', 'replace')
- elif method == 'lastage':
- data_limit = formdata.get('limit')
- data_time = formdata.get('time', 0)
-
- limit = 30
-
- if data_limit:
- try:
- limit = int(data_limit)
- except ValueError:
- raise APIError, "Limit must be numeric"
-
- if limit > 50:
- raise APIError, "Maximum limit is 50"
-
- threads = getLastAge(limit)
- if threads[0]['bumped'] > int(data_time):
- values['threads'] = threads
+ self.output = api_error("exception", str(e), str(type(e)), detail)
else:
- values['threads'] = []
- elif method == 'list':
- data_board = formdata.get('dir')
- data_offset = formdata.get('offset')
- data_limit = formdata.get('limit')
- data_replies = formdata.get('replies')
- offset = 0
- limit = 10
- numreplies = 2
-
- if not data_board:
- raise APIError, "Missing parameters"
-
- if data_limit:
- try:
- limit = int(data_limit)
- except ValueError:
- raise APIError, "Limit must be numeric"
-
- if data_offset:
- try:
- offset = int(data_offset)
- except ValueError:
- raise APIError, "Offset must be numeric"
-
- if data_replies:
- try:
- numreplies = int(data_replies)
- except ValueError:
- raise APIError, "Replies must be numeric"
-
- if data_replies and limit > 30:
- raise APIError, "Maximum limit is 30"
-
- board = setBoard(data_board)
-
- #sql = "SELECT id, timestamp, bumped, timestamp_formatted, name, tripcode, email, subject, message, file, thumb FROM posts WHERE boardid = %s AND parentid = 0 AND IS_DELETED = 0 ORDER BY bumped DESC LIMIT %d" % (board['id'], limit)
- sql = "SELECT p.id, p.timestamp, p.bumped, p.expires, p.expires_formatted, p.timestamp_formatted, p.name, p.tripcode, p.email, p.subject, p.message, p.file, p.file_size, p.image_width, p.image_height, p.thumb, p.thumb_height, p.thumb_width, p.locked, coalesce(x.count,0) AS total_replies, coalesce(x.files,0) AS total_files FROM `posts` AS p LEFT JOIN (SELECT parentid, count(1) as count, count(nullif(file, '')) as files FROM `posts` WHERE boardid = %(board)s GROUP BY parentid) AS x ON p.id=x.parentid WHERE p.parentid = 0 AND p.boardid = %(board)s AND p.IS_DELETED = 0 ORDER BY `bumped` DESC LIMIT %(limit)d OFFSET %(offset)d" % {'board': board["id"], 'limit': limit, 'offset': offset}
-
- threads = FetchAll(sql)
-
- if numreplies:
- for thread in threads:
- lastreplies = FetchAll("SELECT id, timestamp, timestamp_formatted, name, tripcode, email, subject, message, file, file_size, image_height, image_width, thumb, thumb_width, thumb_height, IS_DELETED FROM `posts` WHERE parentid = %s AND boardid = %s ORDER BY `timestamp` DESC LIMIT %d" % (thread['id'], board['id'], numreplies))
- lastreplies = lastreplies[::-1]
- thread['id'] = int(thread['id'])
- thread['timestamp'] = int(thread['timestamp'])
- thread['bumped'] = int(thread['bumped'])
- thread['expires'] = int(thread['expires'])
- thread['total_replies'] = int(thread['total_replies'])
- thread['total_files'] = int(thread['total_files'])
- thread['file_size'] = int(thread['file_size'])
- thread['image_width'] = int(thread['image_width'])
- thread['image_height'] = int(thread['image_height'])
- thread['thumb_width'] = int(thread['thumb_width'])
- thread['thumb_height'] = int(thread['thumb_height'])
- thread['locked'] = int(thread['locked'])
-
- thread['replies'] = []
-
- for post in lastreplies:
- post['IS_DELETED'] = int(post['IS_DELETED'])
- post['id'] = int(post['id'])
- post['timestamp'] = int(post['timestamp'])
-
- if post['IS_DELETED']:
+ self.output = api_error("error", "No method specified")
+
+
+def api_process(self, path_split):
+ formdata = self.formdata
+ ip = self.environ["REMOTE_ADDR"]
+ t = time.time()
+ method = path_split[2]
+
+ #bans = ['181.72.116.62']
+ bans = []
+ if ip in bans:
+ raise APIError, "You have been blacklisted."
+
+ # with open('../api_log.txt', 'a') as f:
+ # logstr = "[%s] %s: %s\n" % (formatTimestamp(t), ip, repr(path_split))
+ # f.write(logstr)
+
+ values = {'state': 'success'}
+
+ if method == 'boards':
+ boards = FetchAll(
+ 'SELECT dir, name, board_type, allow_images, allow_image_replies, maxsize FROM `boards` WHERE `secret`=0 ORDER BY `sort` ASC')
+ values['boards'] = boards
+ for board in values['boards']:
+ board['board_type'] = int(board['board_type'])
+ board['allow_images'] = int(board['allow_images'])
+ board['allow_image_replies'] = int(board['allow_image_replies'])
+ board['maxsize'] = int(board['maxsize'])
+
+ elif method == 'last':
+ data_limit = formdata.get('limit')
+ data_since = formdata.get('since')
+
+ limit = 10
+ since = 0
+
+ if data_limit:
+ try:
+ limit = int(data_limit)
+ except ValueError:
+ raise APIError, "Limit must be numeric"
+
+ if data_since:
+ try:
+ since = int(data_since)
+ except ValueError:
+ raise APIError, "Since must be numeric"
+
+ if limit > 50:
+ raise APIError, "Maximum limit is 50"
+
+ sql = "SELECT posts.id, boards.dir, timestamp, timestamp_formatted, posts.name, tripcode, email, posts.subject, posts.message, file, file_size, image_height, image_width, thumb, thumb_width, thumb_height, parentid FROM posts INNER JOIN boards ON boardid = boards.id WHERE timestamp > %d AND IS_DELETED = 0 AND boards.secret = 0 ORDER BY timestamp DESC LIMIT %d" % (
+ since, limit)
+ values['posts'] = FetchAll(sql)
+
+ for post in values['posts']:
+ post['id'] = int(post['id'])
+ post['timestamp'] = int(post['timestamp'])
+ post['parentid'] = int(post['parentid'])
+ post['file_size'] = int(post['file_size'])
+ post['image_width'] = int(post['image_width'])
+ post['image_height'] = int(post['image_height'])
+ post['thumb_width'] = int(post['thumb_width'])
+ post['thumb_height'] = int(post['thumb_height'])
+ post['message'] = post['message'].decode('utf-8', 'replace')
+ elif method == 'lastage':
+ data_limit = formdata.get('limit')
+ data_time = formdata.get('time', 0)
+
+ limit = 30
+
+ if data_limit:
+ try:
+ limit = int(data_limit)
+ except ValueError:
+ raise APIError, "Limit must be numeric"
+
+ if limit > 50:
+ raise APIError, "Maximum limit is 50"
+
+ threads = getLastAge(limit)
+ if threads[0]['bumped'] > int(data_time):
+ values['threads'] = threads
+ else:
+ values['threads'] = []
+ elif method == 'list':
+ data_board = formdata.get('dir')
+ data_offset = formdata.get('offset')
+ data_limit = formdata.get('limit')
+ data_replies = formdata.get('replies')
+ offset = 0
+ limit = 10
+ numreplies = 2
+
+ if not data_board:
+ raise APIError, "Missing parameters"
+
+ if data_limit:
+ try:
+ limit = int(data_limit)
+ except ValueError:
+ raise APIError, "Limit must be numeric"
+
+ if data_offset:
+ try:
+ offset = int(data_offset)
+ except ValueError:
+ raise APIError, "Offset must be numeric"
+
+ if data_replies:
+ try:
+ numreplies = int(data_replies)
+ except ValueError:
+ raise APIError, "Replies must be numeric"
+
+ if data_replies and limit > 30:
+ raise APIError, "Maximum limit is 30"
+
+ board = setBoard(data_board)
+
+ #sql = "SELECT id, timestamp, bumped, timestamp_formatted, name, tripcode, email, subject, message, file, thumb FROM posts WHERE boardid = %s AND parentid = 0 AND IS_DELETED = 0 ORDER BY bumped DESC LIMIT %d" % (board['id'], limit)
+ sql = "SELECT p.id, p.timestamp, p.bumped, p.expires, p.expires_formatted, p.timestamp_formatted, p.name, p.tripcode, p.email, p.subject, p.message, p.file, p.file_size, p.image_width, p.image_height, p.thumb, p.thumb_height, p.thumb_width, p.locked, coalesce(x.count,0) AS total_replies, coalesce(x.files,0) AS total_files FROM `posts` AS p LEFT JOIN (SELECT parentid, count(1) as count, count(nullif(file, '')) as files FROM `posts` WHERE boardid = %(board)s GROUP BY parentid) AS x ON p.id=x.parentid WHERE p.parentid = 0 AND p.boardid = %(board)s AND p.IS_DELETED = 0 ORDER BY `bumped` DESC LIMIT %(limit)d OFFSET %(offset)d" % {
+ 'board': board["id"], 'limit': limit, 'offset': offset}
+
+ threads = FetchAll(sql)
+
+ if numreplies:
+ for thread in threads:
+ lastreplies = FetchAll("SELECT id, timestamp, timestamp_formatted, name, tripcode, email, subject, message, file, file_size, image_height, image_width, thumb, thumb_width, thumb_height, IS_DELETED FROM `posts` WHERE parentid = %s AND boardid = %s ORDER BY `timestamp` DESC LIMIT %d" % (
+ thread['id'], board['id'], numreplies))
+ lastreplies = lastreplies[::-1]
+ thread['id'] = int(thread['id'])
+ thread['timestamp'] = int(thread['timestamp'])
+ thread['bumped'] = int(thread['bumped'])
+ thread['expires'] = int(thread['expires'])
+ thread['total_replies'] = int(thread['total_replies'])
+ thread['total_files'] = int(thread['total_files'])
+ thread['file_size'] = int(thread['file_size'])
+ thread['image_width'] = int(thread['image_width'])
+ thread['image_height'] = int(thread['image_height'])
+ thread['thumb_width'] = int(thread['thumb_width'])
+ thread['thumb_height'] = int(thread['thumb_height'])
+ thread['locked'] = int(thread['locked'])
+
+ thread['replies'] = []
+
+ for post in lastreplies:
+ post['IS_DELETED'] = int(post['IS_DELETED'])
+ post['id'] = int(post['id'])
+ post['timestamp'] = int(post['timestamp'])
+
+ if post['IS_DELETED']:
+ empty_post = {'id': post['id'],
+ 'IS_DELETED': post['IS_DELETED'],
+ 'timestamp': post['timestamp'],
+ }
+ thread['replies'].append(empty_post)
+ else:
+ post['file_size'] = int(post['file_size'])
+ post['image_width'] = int(post['image_width'])
+ post['image_height'] = int(post['image_height'])
+ post['thumb_width'] = int(post['thumb_width'])
+ post['thumb_height'] = int(post['thumb_height'])
+ post['message'] = post['message'].decode(
+ 'utf-8', 'replace')
+
+ thread['replies'].append(post)
+
+ values['threads'] = threads
+ elif method == 'thread':
+ data_board = formdata.get('dir')
+ data_threadid = formdata.get('id')
+ data_threadts = formdata.get('ts')
+ data_offset = formdata.get('offset')
+ data_limit = formdata.get('limit')
+ data_striphtml = formdata.get('nohtml')
+ striphtml = False
+ offset = 0
+ limit = 1000
+
+ if not data_board or (not data_threadid and not data_threadts):
+ raise APIError, "Missing parameters"
+
+ if data_limit:
+ try:
+ limit = int(data_limit)
+ except ValueError:
+ raise APIError, "Limit must be numeric"
+
+ if data_offset:
+ try:
+ offset = int(data_offset)
+ except ValueError:
+ raise APIError, "Offset must be numeric"
+
+ if data_striphtml:
+ if int(data_striphtml) == 1:
+ striphtml = True
+
+ board = setBoard(data_board)
+ search_field = 'id'
+ search_val = 0
+
+ try:
+ search_val = int(data_threadid)
+ except (ValueError, TypeError):
+ pass
+
+ try:
+ search_val = int(data_threadts)
+ search_field = 'timestamp'
+ except (ValueError, TypeError):
+ pass
+
+ if not search_val:
+ raise APIError, "No thread ID"
+
+ op_post = FetchOne("SELECT id, timestamp, subject, locked FROM posts WHERE `%s` = '%d' AND boardid = '%s' AND parentid = 0" % (
+ search_field, search_val, board["id"]))
+
+ if not op_post:
+ raise APIError, "Not a thread"
+
+ values['id'] = int(op_post['id'])
+ values['timestamp'] = int(op_post['timestamp'])
+ values['subject'] = op_post['subject']
+ values['locked'] = int(op_post['locked'])
+
+ total_replies = int(FetchOne("SELECT COUNT(1) FROM posts WHERE boardid = '%s' AND parentid = '%d'" % (
+ board["id"], values['id']), 0)[0])
+
+ values['total_replies'] = total_replies
+
+ sql = "SELECT id, parentid, timestamp, timestamp_formatted, name, tripcode, email, subject, message, file, file_size, image_width, image_height, thumb, thumb_width, thumb_height, IS_DELETED FROM posts WHERE boardid = %s AND (parentid = %s OR id = %s) ORDER BY id ASC LIMIT %d OFFSET %d" % (
+ _mysql.escape_string(board['id']), values['id'], values['id'], limit, offset)
+ posts = FetchAll(sql)
+
+ values['posts'] = []
+
+ for post in posts:
+ post['IS_DELETED'] = int(post['IS_DELETED'])
+ post['id'] = int(post['id'])
+ post['parentid'] = int(post['parentid'])
+ post['timestamp'] = int(post['timestamp'])
+
+ if post['IS_DELETED']:
+ empty_post = {'id': post['id'],
+ 'IS_DELETED': post['IS_DELETED'],
+ 'parentid': post['parentid'],
+ 'timestamp': post['timestamp'],
+ }
+ values['posts'].append(empty_post)
+ else:
+ post['file_size'] = int(post['file_size'])
+ post['image_width'] = int(post['image_width'])
+ post['image_height'] = int(post['image_height'])
+ post['thumb_width'] = int(post['thumb_width'])
+ post['thumb_height'] = int(post['thumb_height'])
+ post['message'] = post['message'].decode('utf-8', 'replace')
+ if striphtml:
+ post['message'] = post['message'].replace("<br />", " ")
+ post['message'] = re.compile(
+ r"<[^>]*?>", re.DOTALL | re.IGNORECASE).sub("", post['message'])
+ values['posts'].append(post)
+ elif method == 'get':
+ data_board = formdata.get('dir')
+ data_parentid = formdata.get('thread')
+ data_postid = formdata.get('id')
+ data_postnum = formdata.get('num')
+
+ if not data_board and (not data_postid or (not data_postnum and not data_parentid)):
+ raise APIError, "Missing parameters"
+
+ board = setBoard(data_board)
+ postid = 0
+
+ if data_postnum:
+ data_postid = getID(data_parentid, data_postid)
+
+ try:
+ postid = int(data_postid)
+ except ValueError:
+ raise APIError, "Post ID must be numeric"
+
+ post = FetchOne("SELECT id, parentid, timestamp, timestamp_formatted, name, tripcode, email, subject, message, file, file_size, image_width, image_height, thumb, thumb_width, thumb_height, IS_DELETED FROM posts WHERE `id`='%d' AND boardid='%s'" % (
+ postid, board["id"]))
+
+ if not post:
+ raise APIError, "Post ID cannot be found"
+
+ values['posts'] = []
+
+ post['IS_DELETED'] = int(post['IS_DELETED'])
+ post['id'] = int(post['id'])
+ post['parentid'] = int(post['parentid'])
+ post['timestamp'] = int(post['timestamp'])
+
+ if post['IS_DELETED']:
empty_post = {'id': post['id'],
- 'IS_DELETED': post['IS_DELETED'],
- 'timestamp': post['timestamp'],
- }
- thread['replies'].append(empty_post)
- else:
+ 'IS_DELETED': post['IS_DELETED'],
+ 'parentid': post['parentid'],
+ 'timestamp': post['timestamp'],
+ }
+ values['posts'].append(empty_post)
+ else:
post['file_size'] = int(post['file_size'])
post['image_width'] = int(post['image_width'])
post['image_height'] = int(post['image_height'])
post['thumb_width'] = int(post['thumb_width'])
post['thumb_height'] = int(post['thumb_height'])
post['message'] = post['message'].decode('utf-8', 'replace')
-
- thread['replies'].append(post)
-
- values['threads'] = threads
- elif method == 'thread':
- data_board = formdata.get('dir')
- data_threadid = formdata.get('id')
- data_threadts = formdata.get('ts')
- data_offset = formdata.get('offset')
- data_limit = formdata.get('limit')
- data_striphtml = formdata.get('nohtml')
- striphtml = False
- offset = 0
- limit = 1000
-
- if not data_board or (not data_threadid and not data_threadts):
- raise APIError, "Missing parameters"
-
- if data_limit:
- try:
- limit = int(data_limit)
- except ValueError:
- raise APIError, "Limit must be numeric"
-
- if data_offset:
- try:
- offset = int(data_offset)
- except ValueError:
- raise APIError, "Offset must be numeric"
-
- if data_striphtml:
- if int(data_striphtml) == 1:
- striphtml = True
-
- board = setBoard(data_board)
- search_field = 'id'
- search_val = 0
-
- try:
- search_val = int(data_threadid)
- except (ValueError, TypeError):
- pass
-
- try:
- search_val = int(data_threadts)
- search_field = 'timestamp'
- except (ValueError, TypeError):
- pass
-
- if not search_val:
- raise APIError, "No thread ID"
-
- op_post = FetchOne("SELECT id, timestamp, subject, locked FROM posts WHERE `%s` = '%d' AND boardid = '%s' AND parentid = 0" % (search_field, search_val, board["id"]))
-
- if not op_post:
- raise APIError, "Not a thread"
-
- values['id'] = int(op_post['id'])
- values['timestamp'] = int(op_post['timestamp'])
- values['subject'] = op_post['subject']
- values['locked'] = int(op_post['locked'])
-
- total_replies = int(FetchOne("SELECT COUNT(1) FROM posts WHERE boardid = '%s' AND parentid = '%d'" % (board["id"], values['id']), 0)[0])
-
- values['total_replies'] = total_replies
-
- sql = "SELECT id, parentid, timestamp, timestamp_formatted, name, tripcode, email, subject, message, file, file_size, image_width, image_height, thumb, thumb_width, thumb_height, IS_DELETED FROM posts WHERE boardid = %s AND (parentid = %s OR id = %s) ORDER BY id ASC LIMIT %d OFFSET %d" % (_mysql.escape_string(board['id']), values['id'], values['id'], limit, offset)
- posts = FetchAll(sql)
-
- values['posts'] = []
-
- for post in posts:
- post['IS_DELETED'] = int(post['IS_DELETED'])
- post['id'] = int(post['id'])
- post['parentid'] = int(post['parentid'])
- post['timestamp'] = int(post['timestamp'])
-
- if post['IS_DELETED']:
- empty_post = {'id': post['id'],
- 'IS_DELETED': post['IS_DELETED'],
- 'parentid': post['parentid'],
- 'timestamp': post['timestamp'],
- }
- values['posts'].append(empty_post)
- else:
- post['file_size'] = int(post['file_size'])
- post['image_width'] = int(post['image_width'])
- post['image_height'] = int(post['image_height'])
- post['thumb_width'] = int(post['thumb_width'])
- post['thumb_height'] = int(post['thumb_height'])
- post['message'] = post['message'].decode('utf-8', 'replace')
- if striphtml:
- post['message'] = post['message'].replace("<br />", " ")
- post['message'] = re.compile(r"<[^>]*?>", re.DOTALL | re.IGNORECASE).sub("", post['message'])
- values['posts'].append(post)
- elif method == 'get':
- data_board = formdata.get('dir')
- data_parentid = formdata.get('thread')
- data_postid = formdata.get('id')
- data_postnum = formdata.get('num')
-
- if not data_board and (not data_postid or (not data_postnum and not data_parentid)):
- raise APIError, "Missing parameters"
-
- board = setBoard(data_board)
- postid = 0
-
- if data_postnum:
- data_postid = getID(data_parentid, data_postid)
-
- try:
- postid = int(data_postid)
- except ValueError:
- raise APIError, "Post ID must be numeric"
-
- post = FetchOne("SELECT id, parentid, timestamp, timestamp_formatted, name, tripcode, email, subject, message, file, file_size, image_width, image_height, thumb, thumb_width, thumb_height, IS_DELETED FROM posts WHERE `id`='%d' AND boardid='%s'" % (postid, board["id"]))
-
- if not post:
- raise APIError, "Post ID cannot be found"
-
- values['posts'] = []
-
- post['IS_DELETED'] = int(post['IS_DELETED'])
- post['id'] = int(post['id'])
- post['parentid'] = int(post['parentid'])
- post['timestamp'] = int(post['timestamp'])
-
- if post['IS_DELETED']:
- empty_post = {'id': post['id'],
- 'IS_DELETED': post['IS_DELETED'],
- 'parentid': post['parentid'],
- 'timestamp': post['timestamp'],
- }
- values['posts'].append(empty_post)
- else:
- post['file_size'] = int(post['file_size'])
- post['image_width'] = int(post['image_width'])
- post['image_height'] = int(post['image_height'])
- post['thumb_width'] = int(post['thumb_width'])
- post['thumb_height'] = int(post['thumb_height'])
- post['message'] = post['message'].decode('utf-8', 'replace')
- values['posts'].append(post)
- elif method == 'delete':
- data_board = formdata.get('dir')
- data_postid = formdata.get('id')
- data_imageonly = formdata.get('imageonly')
- data_password = formdata.get('password')
-
- if not data_board or not data_postid or not data_password:
- raise APIError, "Missing parameters"
-
- imageonly = False
- board = setBoard(data_board)
-
- try:
- postid = int(data_postid)
- except ValueError:
- raise APIError, "Post ID must be numeric"
-
- if data_imageonly and data_imageonly == 1:
- imageonly = True
-
- deletePost(postid, data_password, board['recyclebin'], imageonly)
- elif method == 'post':
- boarddir = formdata.get('board')
-
- if not boarddir:
- raise APIError, "Missing parameters"
-
- parent = formdata.get('parent')
- trap1 = formdata.get('name', '')
- trap2 = formdata.get('email', '')
- name = formdata.get('fielda', '')
- email = formdata.get('fieldb', '')
- subject = formdata.get('subject', '')
- message = formdata.get('message', '')
- file = formdata.get('file')
- file_original = formdata.get('file_original')
- spoil = formdata.get('spoil')
- oek_file = formdata.get('oek_file')
- password = formdata.get('password', '')
- noimage = formdata.get('noimage')
- mobile = ("mobile" in formdata.keys())
-
- # call post function
- (post_url, ttaken, postid) = self.make_post(ip, boarddir, parent, trap1, trap2, name, email, subject, message, file, file_original, spoil, oek_file, password, noimage, mobile)
-
- values['post_url'] = post_url
- values['time_taken'] = ttaken
- values['post_id'] = postid
- elif method == 'newThreads':
- data_limit = formdata.get('limit')
- limit = 30
- if data_limit:
- try:
- limit = int(data_limit)
- except ValueError:
- raise APIError, "Limit must be numeric"
-
- if limit > 30:
- raise APIError, "Maximum limit is 30"
-
- threads = getNewThreads(limit)
- values['threads'] = threads
- elif method == "blotter":
- latest_news = FetchAll("SELECT `timestamp`, `message`, `timestamp_formatted` FROM `news` WHERE `type` = '2' ORDER BY `timestamp` DESC LIMIT " + str(Settings.HOME_NEWS))
- values["news"] = latest_news
- elif method == 'boardsExtra':
- boards = FetchAll('SELECT dir, name, longname, subname, postarea_desc, postarea_extra, anonymous, subject, message, disable_name, disable_subject, allow_spoilers, allow_oekaki, numthreads, board_type, allow_images, allow_image_replies, maxsize FROM `boards` WHERE `secret`=0 ORDER BY `sort` ASC')
- values['boards'] = boards
- for board in values['boards']:
- board['board_type'] = int(board['board_type'])
- board['allow_images'] = int(board['allow_images'])
- board['allow_image_replies'] = int(board['allow_image_replies'])
- board['disable_name'] = int(board['disable_name'])
- board['disable_subject'] = int(board['disable_subject'])
- board['allow_spoilers'] = int(board['allow_spoilers'])
- board['allow_oekaki'] = int(board['allow_oekaki'])
- board['numthreads'] = int(board['numthreads'])
- board['maxsize'] = int(board['maxsize'])
- else:
- raise APIError, "Invalid method"
-
- values['time'] = int(t)
- #values['time_taken'] = time.time() - t
- return json.dumps(values, sort_keys=True, separators=(',',':'))
-
+ values['posts'].append(post)
+ elif method == 'delete':
+ data_board = formdata.get('dir')
+ data_postid = formdata.get('id')
+ data_imageonly = formdata.get('imageonly')
+ data_password = formdata.get('password')
+
+ if not data_board or not data_postid or not data_password:
+ raise APIError, "Missing parameters"
+
+ imageonly = False
+ board = setBoard(data_board)
+
+ try:
+ postid = int(data_postid)
+ except ValueError:
+ raise APIError, "Post ID must be numeric"
+
+ if data_imageonly and data_imageonly == 1:
+ imageonly = True
+
+ deletePost(postid, data_password, board['recyclebin'], imageonly)
+ elif method == 'post':
+ boarddir = formdata.get('board')
+
+ if not boarddir:
+ raise APIError, "Missing parameters"
+
+ parent = formdata.get('parent')
+ trap1 = formdata.get('name', '')
+ trap2 = formdata.get('email', '')
+ name = formdata.get('fielda', '')
+ email = formdata.get('fieldb', '')
+ subject = formdata.get('subject', '')
+ message = formdata.get('message', '')
+ file = formdata.get('file')
+ file_original = formdata.get('file_original')
+ spoil = formdata.get('spoil')
+ oek_file = formdata.get('oek_file')
+ password = formdata.get('password', '')
+ noimage = formdata.get('noimage')
+ mobile = ("mobile" in formdata.keys())
+
+ # call post function
+ (post_url, ttaken, postid) = self.make_post(ip, boarddir, parent, trap1, trap2, name,
+ email, subject, message, file, file_original, spoil, oek_file, password, noimage, mobile)
+
+ values['post_url'] = post_url
+ values['time_taken'] = ttaken
+ values['post_id'] = postid
+ elif method == 'newThreads':
+ data_limit = formdata.get('limit')
+ limit = 30
+ if data_limit:
+ try:
+ limit = int(data_limit)
+ except ValueError:
+ raise APIError, "Limit must be numeric"
+
+ if limit > 30:
+ raise APIError, "Maximum limit is 30"
+
+ threads = getNewThreads(limit)
+ values['threads'] = threads
+ elif method == "blotter":
+ latest_news = FetchAll(
+ "SELECT `timestamp`, `message`, `timestamp_formatted` FROM `news` WHERE `type` = '2' ORDER BY `timestamp` DESC LIMIT " + str(Settings.HOME_NEWS))
+ values["news"] = latest_news
+ elif method == 'boardsExtra':
+ boards = FetchAll('SELECT dir, name, longname, subname, postarea_desc, postarea_extra, anonymous, subject, message, disable_name, disable_subject, allow_spoilers, allow_oekaki, numthreads, board_type, allow_images, allow_image_replies, maxsize FROM `boards` WHERE `secret`=0 ORDER BY `sort` ASC')
+ values['boards'] = boards
+ for board in values['boards']:
+ board['board_type'] = int(board['board_type'])
+ board['allow_images'] = int(board['allow_images'])
+ board['allow_image_replies'] = int(board['allow_image_replies'])
+ board['disable_name'] = int(board['disable_name'])
+ board['disable_subject'] = int(board['disable_subject'])
+ board['allow_spoilers'] = int(board['allow_spoilers'])
+ board['allow_oekaki'] = int(board['allow_oekaki'])
+ board['numthreads'] = int(board['numthreads'])
+ board['maxsize'] = int(board['maxsize'])
+ else:
+ raise APIError, "Invalid method"
+
+ values['time'] = int(t)
+ #values['time_taken'] = time.time() - t
+ return json.dumps(values, sort_keys=True, separators=(',', ':'))
+
+
def api_error(errtype, msg, type=None, detail=None):
- values = {'state': errtype, 'message': msg}
-
- if type:
- values['type'] = type
- if detail:
- values['detail'] = detail
-
- return json.dumps(values)
-
+ values = {'state': errtype, 'message': msg}
+
+ if type:
+ values['type'] = type
+ if detail:
+ values['detail'] = detail
+
+ return json.dumps(values)
+
+
class APIError(Exception):
- pass
+ pass
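
For reference, and not part of the commit above: api_process() dispatches on path_split[2], and every response is a JSON object carrying a 'state' key, 'success' on a good call or 'failed' / 'error' / 'exception' when api_error() is used. A minimal Python 2 client sketch follows; the base URL is an assumption about how the api() handler is mounted, so adjust it to the real deployment.

# Client sketch only; the URL is hypothetical, the JSON keys come from the diff above.
import json
import urllib2

resp = urllib2.urlopen("http://example.com/api/boards")  # assumed mount point for api()
data = json.loads(resp.read())
if data["state"] == "success":
    for board in data["boards"]:
        print board["dir"], board["name"], board["maxsize"]
else:
    print "API call failed:", data.get("message")
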
diff --git a/cgi/database.py b/cgi/database.py
index c8611c5..9b2c1e7 100644
--- a/cgi/database.py
+++ b/cgi/database.py
@@ -7,63 +7,69 @@ from settings import Settings
database_lock = threading.Lock()
try:
- # Although SQLAlchemy is optional, it is highly recommended
- import sqlalchemy.pool as pool
- _mysql = pool.manage( module = _mysql,
- pool_size = Settings.DATABASE_POOL_SIZE,
- max_overflow = Settings.DATABASE_POOL_OVERFLOW)
- Settings._.USING_SQLALCHEMY = True
+ # Although SQLAlchemy is optional, it is highly recommended
+ import sqlalchemy.pool as pool
+ _mysql = pool.manage(module=_mysql,
+ pool_size=Settings.DATABASE_POOL_SIZE,
+ max_overflow=Settings.DATABASE_POOL_OVERFLOW)
+ Settings._.USING_SQLALCHEMY = True
except ImportError:
- pass
+ pass
+
def OpenDb():
- if Settings._.CONN is None:
- Settings._.CONN = _mysql.connect(host = Settings.DATABASE_HOST,
- user = Settings.DATABASE_USERNAME,
- passwd = Settings.DATABASE_PASSWORD,
- db = Settings.DATABASE_DB)
+ if Settings._.CONN is None:
+ Settings._.CONN = _mysql.connect(host=Settings.DATABASE_HOST,
+ user=Settings.DATABASE_USERNAME,
+ passwd=Settings.DATABASE_PASSWORD,
+ db=Settings.DATABASE_DB)
+
def FetchAll(query, method=1):
- """
- Query and fetch all results as a list
- """
- db = Settings._.CONN
-
- db.query(query)
- r = db.use_result()
- return r.fetch_row(0, method)
+ """
+ Query and fetch all results as a list
+ """
+ db = Settings._.CONN
+
+ db.query(query)
+ r = db.use_result()
+ return r.fetch_row(0, method)
+
def FetchOne(query, method=1):
- """
- Query and fetch only the first result
- """
- db = Settings._.CONN
-
- db.query(query)
- r = db.use_result()
- try:
- return r.fetch_row(1, method)[0]
- except:
- return None
+ """
+ Query and fetch only the first result
+ """
+ db = Settings._.CONN
+
+ db.query(query)
+ r = db.use_result()
+ try:
+ return r.fetch_row(1, method)[0]
+ except:
+ return None
+
def UpdateDb(query):
- """
- Update the DB (UPDATE/DELETE) and return # of affected rows
- """
- db = Settings._.CONN
-
- db.query(query)
- return db.affected_rows()
-
+ """
+ Update the DB (UPDATE/DELETE) and return # of affected rows
+ """
+ db = Settings._.CONN
+
+ db.query(query)
+ return db.affected_rows()
+
+
def InsertDb(query):
- """
- Insert into the DB and return the primary key of new row
- """
- db = Settings._.CONN
-
- db.query(query)
- return db.insert_id()
+ """
+ Insert into the DB and return the primary key of new row
+ """
+ db = Settings._.CONN
+
+ db.query(query)
+ return db.insert_id()
+
def CloseDb():
- if Settings._.CONN is not None:
- Settings._.CONN.close()
+ if Settings._.CONN is not None:
+ Settings._.CONN.close()
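
For reference, and not part of the commit above: the optional method argument of FetchAll()/FetchOne() is handed straight to _mysql's fetch_row(how=...), so the default of 1 returns rows as dictionaries keyed by column name while 0 returns plain tuples, which is why api.py reads FetchOne("SELECT COUNT(1) ...", 0)[0]. A short usage sketch, assuming the credentials in settings.py are configured and using table names that appear elsewhere in this diff:

# Usage sketch only; not part of this commit.
from database import OpenDb, FetchOne, FetchAll, CloseDb

OpenDb()
board = FetchOne("SELECT `dir`, `name` FROM `boards` LIMIT 1")         # dict row: board['dir']
total = FetchOne("SELECT COUNT(1) FROM `posts`", 0)[0]                 # tuple row, as api.py does
dirs = FetchAll("SELECT `dir` FROM `boards` ORDER BY `sort` ASC")      # list of dict rows
print board['name'], total, len(dirs)
CloseDb()
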
diff --git a/cgi/fcgi.py b/cgi/fcgi.py
index e59f8c8..08af980 100644
--- a/cgi/fcgi.py
+++ b/cgi/fcgi.py
@@ -137,11 +137,13 @@ if __debug__:
except:
pass
+
class InputStream(object):
"""
File-like object representing FastCGI input streams (FCGI_STDIN and
FCGI_DATA). Supports the minimum methods required by WSGI spec.
"""
+
def __init__(self, conn):
self._conn = conn
@@ -150,10 +152,10 @@ class InputStream(object):
self._buf = ''
self._bufList = []
- self._pos = 0 # Current read position.
- self._avail = 0 # Number of bytes currently available.
+ self._pos = 0 # Current read position.
+ self._avail = 0 # Number of bytes currently available.
- self._eof = False # True when server has sent EOF notification.
+ self._eof = False # True when server has sent EOF notification.
def _shrinkBuffer(self):
"""Gets rid of already read data (since we can't rewind)."""
@@ -253,12 +255,14 @@ class InputStream(object):
self._bufList.append(data)
self._avail += len(data)
+
class MultiplexedInputStream(InputStream):
"""
A version of InputStream meant to be used with MultiplexedConnections.
Assumes the MultiplexedConnection (the producer) and the Request
(the consumer) are running in different threads.
"""
+
def __init__(self, conn):
super(MultiplexedInputStream, self).__init__(conn)
@@ -295,18 +299,20 @@ class MultiplexedInputStream(InputStream):
finally:
self._lock.release()
+
class OutputStream(object):
"""
FastCGI output stream (FCGI_STDOUT/FCGI_STDERR). By default, calls to
write() or writelines() immediately result in Records being sent back
to the server. Buffering should be done in a higher level!
"""
+
def __init__(self, conn, req, type, buffered=False):
self._conn = conn
self._req = req
self._type = type
self._buffered = buffered
- self._bufList = [] # Used if buffered is True
+ self._bufList = [] # Used if buffered is True
self.dataWritten = False
self.closed = False
@@ -358,11 +364,13 @@ class OutputStream(object):
self._conn.writeRecord(rec)
self.closed = True
+
class TeeOutputStream(object):
"""
Simple wrapper around two or more output file-like objects that copies
written data to all streams.
"""
+
def __init__(self, streamList):
self._streamList = streamList
@@ -378,10 +386,12 @@ class TeeOutputStream(object):
for f in self._streamList:
f.flush()
+
class StdoutWrapper(object):
"""
Wrapper for sys.stdout so we know if data has actually been written.
"""
+
def __init__(self, stdout):
self._file = stdout
self.dataWritten = False
@@ -398,6 +408,7 @@ class StdoutWrapper(object):
def __getattr__(self, name):
return getattr(self._file, name)
+
def decode_pair(s, pos=0):
"""
Decodes a name/value pair.
@@ -426,6 +437,7 @@ def decode_pair(s, pos=0):
return (pos, (name, value))
+
def encode_pair(name, value):
"""
Encodes a name/value pair.
@@ -445,13 +457,15 @@ def encode_pair(name, value):
s += struct.pack('!L', valueLength | 0x80000000L)
return s + name + value
-
+
+
class Record(object):
"""
A FastCGI Record.
Used for encoding/decoding records.
"""
+
def __init__(self, type=FCGI_UNKNOWN_TYPE, requestId=FCGI_NULL_REQUEST_ID):
self.version = FCGI_VERSION_1
self.type = type
@@ -476,7 +490,7 @@ class Record(object):
continue
else:
raise
- if not data: # EOF
+ if not data: # EOF
break
dataList.append(data)
dataLen = len(data)
@@ -494,15 +508,16 @@ class Record(object):
if length < FCGI_HEADER_LEN:
raise EOFError
-
+
self.version, self.type, self.requestId, self.contentLength, \
- self.paddingLength = struct.unpack(FCGI_Header, header)
+ self.paddingLength = struct.unpack(FCGI_Header, header)
+
+ if __debug__:
+ _debug(9, 'read: fd = %d, type = %d, requestId = %d, '
+ 'contentLength = %d' %
+ (sock.fileno(), self.type, self.requestId,
+ self.contentLength))
- if __debug__: _debug(9, 'read: fd = %d, type = %d, requestId = %d, '
- 'contentLength = %d' %
- (sock.fileno(), self.type, self.requestId,
- self.contentLength))
-
if self.contentLength:
try:
self.contentData, length = self._recvall(sock,
@@ -541,10 +556,11 @@ class Record(object):
"""Encode and write a Record to a socket."""
self.paddingLength = -self.contentLength & 7
- if __debug__: _debug(9, 'write: fd = %d, type = %d, requestId = %d, '
- 'contentLength = %d' %
- (sock.fileno(), self.type, self.requestId,
- self.contentLength))
+ if __debug__:
+ _debug(9, 'write: fd = %d, type = %d, requestId = %d, '
+ 'contentLength = %d' %
+ (sock.fileno(), self.type, self.requestId,
+ self.contentLength))
header = struct.pack(FCGI_Header, self.version, self.type,
self.requestId, self.contentLength,
@@ -554,7 +570,8 @@ class Record(object):
self._sendall(sock, self.contentData)
if self.paddingLength:
self._sendall(sock, '\x00'*self.paddingLength)
-
+
+
class Request(object):
"""
Represents a single FastCGI request.
@@ -564,6 +581,7 @@ class Request(object):
be called by your handler. However, server, params, stdin, stdout,
stderr, and data are free for your handler's use.
"""
+
def __init__(self, conn, inputStreamClass):
self._conn = conn
@@ -586,35 +604,38 @@ class Request(object):
protocolStatus, appStatus = FCGI_REQUEST_COMPLETE, 0
- if __debug__: _debug(1, 'protocolStatus = %d, appStatus = %d' %
- (protocolStatus, appStatus))
+ if __debug__:
+ _debug(1, 'protocolStatus = %d, appStatus = %d' %
+ (protocolStatus, appStatus))
self._flush()
self._end(appStatus, protocolStatus)
def _end(self, appStatus=0L, protocolStatus=FCGI_REQUEST_COMPLETE):
self._conn.end_request(self, appStatus, protocolStatus)
-
+
def _flush(self):
self.stdout.close()
self.stderr.close()
+
class CGIRequest(Request):
"""A normal CGI request disguised as a FastCGI request."""
+
def __init__(self, server):
# These are normally filled in by Connection.
self.requestId = 1
self.role = FCGI_RESPONDER
self.flags = 0
self.aborted = False
-
+
self.server = server
self.params = dict(os.environ)
self.stdin = sys.stdin
- self.stdout = StdoutWrapper(sys.stdout) # Oh, the humanity!
+ self.stdout = StdoutWrapper(sys.stdout) # Oh, the humanity!
self.stderr = sys.stderr
self.data = StringIO.StringIO()
-
+
def _end(self, appStatus=0L, protocolStatus=FCGI_REQUEST_COMPLETE):
sys.exit(appStatus)
@@ -622,6 +643,7 @@ class CGIRequest(Request):
# Not buffered, do nothing.
pass
+
class Connection(object):
"""
A Connection with the web server.
@@ -655,7 +677,7 @@ class Connection(object):
except:
pass
self._sock.close()
-
+
def run(self):
"""Begin processing data from the socket."""
self._keepGoing = True
@@ -665,7 +687,7 @@ class Connection(object):
except EOFError:
break
except (select.error, socket.error), e:
- if e[0] == errno.EBADF: # Socket was closed by Request.
+ if e[0] == errno.EBADF: # Socket was closed by Request.
break
raise
@@ -684,7 +706,8 @@ class Connection(object):
# Sigh. ValueError gets thrown sometimes when passing select
# a closed socket.
raise EOFError
- if r: break
+ if r:
+ break
if not self._keepGoing:
return
rec = Record()
@@ -733,7 +756,8 @@ class Connection(object):
if remove:
del self._requests[req.requestId]
- if __debug__: _debug(2, 'end_request: flags = %d' % req.flags)
+ if __debug__:
+ _debug(2, 'end_request: flags = %d' % req.flags)
if not (req.flags & FCGI_KEEP_CONN) and not self._requests:
self._cleanupSocket()
@@ -816,7 +840,8 @@ class Connection(object):
outrec.contentData = struct.pack(FCGI_UnknownTypeBody, inrec.type)
outrec.contentLength = FCGI_UnknownTypeBody_LEN
self.writeRecord(rec)
-
+
+
class MultiplexedConnection(Connection):
"""
A version of Connection capable of handling multiple requests
@@ -843,7 +868,7 @@ class MultiplexedConnection(Connection):
self._lock.release()
super(MultiplexedConnection, self)._cleanupSocket()
-
+
def writeRecord(self, rec):
# Must use locking to prevent intermingling of Records from different
# threads.
@@ -902,7 +927,8 @@ class MultiplexedConnection(Connection):
super(MultiplexedConnection, self)._do_data(inrec)
finally:
self._lock.release()
-
+
+
class Server(object):
"""
The FastCGI server.
@@ -959,18 +985,18 @@ class Server(object):
# from the OS.
maxConns = resource.getrlimit(resource.RLIMIT_NOFILE)[0]
except ImportError:
- maxConns = 100 # Just some made up number.
+ maxConns = 100 # Just some made up number.
maxReqs = maxConns
if multiplexed:
self._connectionClass = MultiplexedConnection
- maxReqs *= 5 # Another made up number.
+ maxReqs *= 5 # Another made up number.
else:
self._connectionClass = Connection
self.capability = {
FCGI_MAX_CONNS: maxConns,
FCGI_MAX_REQS: maxReqs,
FCGI_MPXS_CONNS: multiplexed and 1 or 0
- }
+ }
else:
self._connectionClass = Connection
self.capability = {
@@ -978,18 +1004,18 @@ class Server(object):
FCGI_MAX_CONNS: 1,
FCGI_MAX_REQS: 1,
FCGI_MPXS_CONNS: 0
- }
+ }
self._bindAddress = bindAddress
self._umask = umask
def _setupSocket(self):
- if self._bindAddress is None: # Run as a normal FastCGI?
+ if self._bindAddress is None: # Run as a normal FastCGI?
isFCGI = True
if isFCGI:
try:
sock = socket.fromfd(FCGI_LISTENSOCK_FILENO, socket.AF_INET,
- socket.SOCK_STREAM)
+ socket.SOCK_STREAM)
sock.getpeername()
except AttributeError:
isFCGI = False
@@ -1042,16 +1068,16 @@ class Server(object):
sock.close()
def _installSignalHandlers(self):
- self._oldSIGs = [(x,signal.getsignal(x)) for x in
+ self._oldSIGs = [(x, signal.getsignal(x)) for x in
(signal.SIGHUP, signal.SIGINT, signal.SIGTERM)]
signal.signal(signal.SIGHUP, self._hupHandler)
signal.signal(signal.SIGINT, self._intHandler)
signal.signal(signal.SIGTERM, self._intHandler)
def _restoreSignalHandlers(self):
- for signum,handler in self._oldSIGs:
+ for signum, handler in self._oldSIGs:
signal.signal(signum, handler)
-
+
def _hupHandler(self, signum, frame):
self._hupReceived = True
self._keepGoing = False
@@ -1094,7 +1120,7 @@ class Server(object):
raise
if web_server_addrs and \
- (len(addr) != 2 or addr[0] not in web_server_addrs):
+ (len(addr) != 2 or addr[0] not in web_server_addrs):
clientSock.close()
continue
@@ -1145,11 +1171,13 @@ class Server(object):
req.stdout.write('Content-Type: text/html\r\n\r\n' +
cgitb.html(sys.exc_info()))
+
class WSGIServer(Server):
"""
FastCGI server that supports the Web Server Gateway Interface. See
<http://www.python.org/peps/pep-0333.html>.
"""
+
def __init__(self, application, environ=None, umask=None,
multithreaded=True, **kw):
"""
@@ -1160,7 +1188,7 @@ class WSGIServer(Server):
Set multithreaded to False if your application is not MT-safe.
"""
if kw.has_key('handler'):
- del kw['handler'] # Doesn't make sense to let this through
+ del kw['handler'] # Doesn't make sense to let this through
super(WSGIServer, self).__init__(**kw)
if environ is None:
@@ -1182,7 +1210,7 @@ class WSGIServer(Server):
environ = req.params
environ.update(self.environ)
- environ['wsgi.version'] = (1,0)
+ environ['wsgi.version'] = (1, 0)
environ['wsgi.input'] = req.stdin
if self._bindAddress is None:
stderr = req.stderr
@@ -1190,7 +1218,7 @@ class WSGIServer(Server):
stderr = TeeOutputStream((sys.stderr, req.stderr))
environ['wsgi.errors'] = stderr
environ['wsgi.multithread'] = not isinstance(req, CGIRequest) and \
- thread_available and self.multithreaded
+ thread_available and self.multithreaded
# Rationale for the following: If started by the web server
# (self._bindAddress is None) in either FastCGI or CGI mode, the
# possibility of being spawned multiple times simultaneously is quite
@@ -1218,7 +1246,7 @@ class WSGIServer(Server):
if not headers_sent:
status, responseHeaders = headers_sent[:] = headers_set
found = False
- for header,value in responseHeaders:
+ for header, value in responseHeaders:
if header.lower() == 'content-length':
found = True
break
@@ -1245,7 +1273,7 @@ class WSGIServer(Server):
# Re-raise if too late
raise exc_info[0], exc_info[1], exc_info[2]
finally:
- exc_info = None # avoid dangling circular ref
+ exc_info = None # avoid dangling circular ref
else:
assert not headers_set, 'Headers already set!'
@@ -1255,7 +1283,7 @@ class WSGIServer(Server):
assert status[3] == ' ', 'Status must have a space after code'
assert type(response_headers) is list, 'Headers must be a list'
if __debug__:
- for name,val in response_headers:
+ for name, val in response_headers:
assert type(name) is str, 'Header names must be strings'
assert type(val) is str, 'Header values must be strings'
@@ -1272,13 +1300,13 @@ class WSGIServer(Server):
if data:
write(data)
if not headers_sent:
- write('') # in case body was empty
+ write('') # in case body was empty
finally:
if hasattr(result, 'close'):
result.close()
except socket.error, e:
if e[0] != errno.EPIPE:
- raise # Don't let EPIPE propagate beyond server
+ raise # Don't let EPIPE propagate beyond server
finally:
if not self.multithreaded:
self._app_lock.release()
@@ -1294,16 +1322,17 @@ class WSGIServer(Server):
# If any of these are missing, it probably signifies a broken
# server...
- for name,default in [('REQUEST_METHOD', 'GET'),
- ('SERVER_NAME', 'localhost'),
- ('SERVER_PORT', '80'),
- ('SERVER_PROTOCOL', 'HTTP/1.0')]:
+ for name, default in [('REQUEST_METHOD', 'GET'),
+ ('SERVER_NAME', 'localhost'),
+ ('SERVER_PORT', '80'),
+ ('SERVER_PROTOCOL', 'HTTP/1.0')]:
if not environ.has_key(name):
environ['wsgi.errors'].write('%s: missing FastCGI param %s '
'required by WSGI!\n' %
(self.__class__.__name__, name))
environ[name] = default
-
+
+
if __name__ == '__main__':
def test_app(environ, start_response):
"""Probably not the most efficient example."""
diff --git a/cgi/formatting.py b/cgi/formatting.py
index 373d685..6461a33 100644
--- a/cgi/formatting.py
+++ b/cgi/formatting.py
@@ -14,412 +14,446 @@ from post import regenerateAccess
from settings import Settings
+
def format_post(message, ip, parentid, parent_timestamp=0):
- """
- Formats posts using the specified format
- """
- board = Settings._.BOARD
- using_markdown = False
-
- # Escape any HTML if user is not using Markdown or HTML
- if not Settings.USE_HTML:
- message = cgi.escape(message)
-
- # Strip text
- message = message.rstrip()[0:8000]
-
- # Treat HTML
- if Settings.USE_MARKDOWN:
- message = markdown(message)
- using_markdown = True
- if Settings.USE_HTML:
- message = onlyAllowedHTML(message)
-
- # [code] tag
- if board["dir"] == "tech":
- message = re.compile(r"\[code\](.+)\[/code\]", re.DOTALL | re.IGNORECASE).sub(r"<pre><code>\1</code></pre>", message)
- if board["allow_spoilers"]:
- message = re.compile(r"\[spoiler\](.+)\[/spoiler\]", re.DOTALL | re.IGNORECASE).sub(r'<span class="spoil">\1</span>', message)
-
- if Settings.VIDEO_THUMBS:
- (message, affected) = videoThumbs(message)
- #if affected:
- # message = close_html(message)
-
- message = clickableURLs(message)
- message = checkRefLinks(message, parentid, parent_timestamp)
- message = checkWordfilters(message, ip, board["dir"])
-
- # If not using markdown quotes must be created and \n changed for HTML line breaks
- if not using_markdown:
- message = re.compile(r"^(\n)+").sub('', message)
- message = checkQuotes(message)
- message = message.replace("\n", "<br />")
-
- return message
-
+ """
+ Formats posts using the specified format
+ """
+ board = Settings._.BOARD
+ using_markdown = False
+
+ # Escape any HTML if user is not using Markdown or HTML
+ if not Settings.USE_HTML:
+ message = cgi.escape(message)
+
+ # Strip text
+ message = message.rstrip()[0:8000]
+
+ # Treat HTML
+ if Settings.USE_MARKDOWN:
+ message = markdown(message)
+ using_markdown = True
+ if Settings.USE_HTML:
+ message = onlyAllowedHTML(message)
+
+ # [code] tag
+ if board["dir"] == "tech":
+ message = re.compile(r"\[code\](.+)\[/code\]", re.DOTALL |
+ re.IGNORECASE).sub(r"<pre><code>\1</code></pre>", message)
+ if board["allow_spoilers"]:
+ message = re.compile(r"\[spoiler\](.+)\[/spoiler\]", re.DOTALL |
+ re.IGNORECASE).sub(r'<span class="spoil">\1</span>', message)
+
+ if Settings.VIDEO_THUMBS:
+ (message, affected) = videoThumbs(message)
+ # if affected:
+ # message = close_html(message)
+
+ message = clickableURLs(message)
+ message = checkRefLinks(message, parentid, parent_timestamp)
+ message = checkWordfilters(message, ip, board["dir"])
+
+    # If not using Markdown, quotes must be created and \n changed to HTML line breaks
+ if not using_markdown:
+ message = re.compile(r"^(\n)+").sub('', message)
+ message = checkQuotes(message)
+ message = message.replace("\n", "<br />")
+
+ return message
+
+
def tripcode(name):
- """
- Calculate tripcode to match output of most imageboards
- """
- if name == '':
- return '', ''
-
- board = Settings._.BOARD
-
- name = name.decode('utf-8')
- key = Settings.TRIP_CHAR.decode('utf-8')
-
- # if there's a trip
- (namepart, marker, trippart) = name.partition('#')
- if marker:
- namepart = cleanString(namepart)
- trip = ''
-
- # secure tripcode
- if Settings.ALLOW_SECURE_TRIPCODES and '#' in trippart:
- (trippart, securemarker, securepart) = trippart.partition('#')
- try:
- securepart = securepart.encode("sjis", "ignore")
- except:
- pass
-
- # encode secure tripcode
- trip = getMD5(securepart + Settings.SECRET)
- trip = trip.encode('base64').replace('\n', '')
- trip = trip.encode('rot13')
- trip = key+key+trip[2:12]
-
- # return it if we don't have a normal tripcode
- if trippart == '':
+ """
+ Calculate tripcode to match output of most imageboards
+ """
+ if name == '':
+ return '', ''
+
+ board = Settings._.BOARD
+
+ name = name.decode('utf-8')
+ key = Settings.TRIP_CHAR.decode('utf-8')
+
+ # if there's a trip
+ (namepart, marker, trippart) = name.partition('#')
+ if marker:
+ namepart = cleanString(namepart)
+ trip = ''
+
+ # secure tripcode
+ if Settings.ALLOW_SECURE_TRIPCODES and '#' in trippart:
+ (trippart, securemarker, securepart) = trippart.partition('#')
+ try:
+ securepart = securepart.encode("sjis", "ignore")
+ except:
+ pass
+
+ # encode secure tripcode
+ trip = getMD5(securepart + Settings.SECRET)
+ trip = trip.encode('base64').replace('\n', '')
+ trip = trip.encode('rot13')
+ trip = key+key+trip[2:12]
+
+ # return it if we don't have a normal tripcode
+ if trippart == '':
+ return namepart.encode('utf-8'), trip.encode('utf-8')
+
+ # do normal tripcode
+ from crypt import crypt
+ try:
+ trippart = trippart.encode("sjis", "ignore")
+ except:
+ pass
+
+ trippart = cleanString(trippart, True, True)
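+    # Classic imageboard tripcode: the crypt(3) salt comes from characters 2-3
+    # of the Shift-JIS trip padded with "H..", with out-of-range characters
+    # remapped, and only the last 10 characters of the hash are kept.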
+ salt = re.sub(r"[^\.-z]", ".", (trippart + "H..")[1:3])
+ salt = salt.translate(string.maketrans(r":;=?@[\]^_`", "ABDFGabcdef"))
+ trip = key + crypt(trippart, salt)[-10:] + trip
+
+        return namepart.encode('utf-8'), trip.encode('utf-8')
-
- # do normal tripcode
- from crypt import crypt
- try:
- trippart = trippart.encode("sjis", "ignore")
- except:
- pass
-
- trippart = cleanString(trippart, True, True)
- salt = re.sub(r"[^\.-z]", ".", (trippart + "H..")[1:3])
- salt = salt.translate(string.maketrans(r":;=?@[\]^_`", "ABDFGabcdef"))
- trip = key + crypt(trippart, salt)[-10:] + trip
-
- return namepart.encode('utf-8'), trip.encode('utf-8')
-
- return name.encode('utf-8'), ''
+
+ return name.encode('utf-8'), ''
+
def iphash(ip, post, t, useid, mobile, agent, cap_id, hide_end, has_countrycode):
- current_t = time.time()
-
- if cap_id:
- id = cap_id
- elif 'sage' in post['email'] and useid == '1':
- id = '???'
- elif ip == "127.0.0.1":
- id = '???'
- else:
- day = int((current_t + (Settings.TIME_ZONE*3600)) / 86400)
- word = ',' + str(day) # IDs change every 24 hours
- word += ',' + str(t) # IDs vary depending on thread
- id = hide_data(ip + word, 6, "id", Settings.SECRET)
-
- if hide_end:
- id += '*'
- elif addressIsTor(ip):
- id += 'T'
- elif 'Dalvik' in agent:
- id += 'R'
- elif 'Android' in agent:
- id += 'a'
- elif 'iPhone' in agent:
- id += 'i'
- elif useid == '3':
- if 'Firefox' in agent:
- id += 'F'
- elif 'Safari' in agent and not 'Chrome' in agent:
- id += 's'
- elif 'Chrome' in agent:
- id += 'C'
- elif 'SeaMonkey' in agent:
- id += 'S'
- elif 'Edge' in agent:
- id += 'E'
- elif 'Opera' in agent or 'OPR' in agent:
- id += 'o'
- elif 'MSIE' in agent or 'Trident' in agent:
- id += 'I'
+ current_t = time.time()
+
+ if cap_id:
+ id = cap_id
+ elif 'sage' in post['email'] and useid == '1':
+ id = '???'
+ elif ip == "127.0.0.1":
+ id = '???'
+ else:
+ day = int((current_t + (Settings.TIME_ZONE*3600)) / 86400)
+ word = ',' + str(day) # IDs change every 24 hours
+ word += ',' + str(t) # IDs vary depending on thread
+ id = hide_data(ip + word, 6, "id", Settings.SECRET)
+
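+    # A one-character suffix marks the client: '*' hidden, 'T' Tor, 'R' Dalvik,
+    # 'a' Android, 'i' iPhone, a browser letter when useid == '3', 'Q' other
+    # mobile and '0' for everything else.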
+ if hide_end:
+ id += '*'
+ elif addressIsTor(ip):
+ id += 'T'
+ elif 'Dalvik' in agent:
+ id += 'R'
+ elif 'Android' in agent:
+ id += 'a'
+ elif 'iPhone' in agent:
+ id += 'i'
+ elif useid == '3':
+ if 'Firefox' in agent:
+ id += 'F'
+ elif 'Safari' in agent and not 'Chrome' in agent:
+ id += 's'
+ elif 'Chrome' in agent:
+ id += 'C'
+ elif 'SeaMonkey' in agent:
+ id += 'S'
+ elif 'Edge' in agent:
+ id += 'E'
+ elif 'Opera' in agent or 'OPR' in agent:
+ id += 'o'
+ elif 'MSIE' in agent or 'Trident' in agent:
+ id += 'I'
+ elif mobile:
+ id += 'Q'
+ else:
+ id += '0'
+    elif mobile:
- id += 'Q'
+ id += 'Q'
+    else:
- id += '0'
- elif mobile:
- id += 'Q'
- else:
- id += '0'
-
- if addressIsBanned(ip, ""):
- id += '#'
- elif (not has_countrycode and not addressIsTor(ip) and
- (addressIsProxy(ip) or not addressIsES(ip))):
- id += '!'
-
- return id
+ id += '0'
+
+ if addressIsBanned(ip, ""):
+ id += '#'
+ elif (not has_countrycode and not addressIsTor(ip) and
+ (addressIsProxy(ip) or not addressIsES(ip))):
+ id += '!'
+
+ return id
+
def cleanString(string, escape=True, quote=False):
- string = string.strip()
- if escape:
- string = cgi.escape(string, quote)
- return string
+ string = string.strip()
+ if escape:
+ string = cgi.escape(string, quote)
+ return string
+
def clickableURLs(message):
- # URL
- message = re.compile(r'( |^|:|\(|\[)((?:https?://|ftp://|mailto:|news:|irc:)[^\s<>()"]*?(?:\([^\s<>()"]*?\)[^\s<>()"]*?)*)((?:\s|<|>|"|\.|\|\]|!|\?|,|&#44;|&quot;)*(?:[\s<>()"]|$))', re.M).sub(r'\1<a href="\2" rel="nofollow" target="_blank">\2</a>\3', message)
- # Emails
- message = re.compile(r"( |^|:)([A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,6})", re.I | re.M).sub(r'\1<a href="mailto:\2" rel="nofollow">&lt;\2&gt;</a>', message)
+ # URL
+ message = re.compile(r'( |^|:|\(|\[)((?:https?://|ftp://|mailto:|news:|irc:)[^\s<>()"]*?(?:\([^\s<>()"]*?\)[^\s<>()"]*?)*)((?:\s|<|>|"|\.|\|\]|!|\?|,|&#44;|&quot;)*(?:[\s<>()"]|$))',
+ re.M).sub(r'\1<a href="\2" rel="nofollow" target="_blank">\2</a>\3', message)
+ # Emails
+ message = re.compile(r"( |^|:)([A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,6})", re.I | re.M).sub(
+ r'\1<a href="mailto:\2" rel="nofollow">&lt;\2&gt;</a>', message)
+
+ return message
+
- return message
-
def videoThumbs(message):
- # Youtube
- __RE = re.compile(r"^(?: +)?(https?://(?:www\.)?youtu(?:be\.com/watch\?v=|\.be/)([\w\-]+))(?: +)?$", re.M)
- matches = __RE.finditer(message)
- if matches:
- import json
- import urllib, urllib2
-
- v_ids = []
- videos = {}
-
- for match in matches:
- v_id = match.group(2)
- if v_id not in v_ids:
- v_ids.append(v_id)
- videos[v_id] = {
- 'span': match.span(0),
- 'url': match.group(1),
+ # Youtube
+ __RE = re.compile(
+ r"^(?: +)?(https?://(?:www\.)?youtu(?:be\.com/watch\?v=|\.be/)([\w\-]+))(?: +)?$", re.M)
+ matches = __RE.finditer(message)
+ if matches:
+ import json
+ import urllib
+ import urllib2
+
+ v_ids = []
+ videos = {}
+
+ for match in matches:
+ v_id = match.group(2)
+ if v_id not in v_ids:
+ v_ids.append(v_id)
+ videos[v_id] = {
+ 'span': match.span(0),
+ 'url': match.group(1),
+ }
+ if len(v_ids) >= Settings.VIDEO_THUMBS_LIMIT:
+ raise UserError, "Has incluído muchos videos en tu mensaje. El máximo es %d." % Settings.VIDEO_THUMBS_LIMIT
+
+ if videos:
+ params = {
+ 'key': Settings.GOOGLE_API_KEY,
+ 'part': 'snippet,contentDetails',
+ 'id': ','.join(v_ids)
+            }
- if len(v_ids) >= Settings.VIDEO_THUMBS_LIMIT:
- raise UserError, "Has incluído muchos videos en tu mensaje. El máximo es %d." % Settings.VIDEO_THUMBS_LIMIT
-
- if videos:
- params = {
- 'key': Settings.GOOGLE_API_KEY,
- 'part': 'snippet,contentDetails',
- 'id': ','.join(v_ids)
- }
- r_url = "https://www.googleapis.com/youtube/v3/videos?"+urllib.urlencode(params)
- res = urllib2.urlopen(r_url)
- res_json = json.load(res)
-
- offset = 0
- for item in res_json['items']:
- v_id = item['id']
- (start, end) = videos[v_id]['span']
- end += 1 # remove endline
-
- try:
- new_url = '<a href="%(url)s" target="_blank" class="yt"><span class="pvw"><img src="%(thumb)s" /></span><b>%(title)s</b> (%(secs)s)<br />%(channel)s</a><br />' \
- % {'title': item['snippet']['title'].encode('utf-8'),
- 'channel': item['snippet']['channelTitle'].encode('utf-8'),
- 'secs': parseIsoPeriod(item['contentDetails']['duration']).encode('utf-8'),
- 'url': videos[v_id]['url'],
- 'id': v_id.encode('utf-8'),
- 'thumb': item['snippet']['thumbnails']['default']['url'].encode('utf-8'),}
- except UnicodeDecodeError:
- raise UserError, repr(v_id)
- message = message[:start+offset] + new_url + message[end+offset:]
- offset += len(new_url) - (end-start)
-
- return (message, len(videos))
+ r_url = "https://www.googleapis.com/youtube/v3/videos?" + \
+ urllib.urlencode(params)
+ res = urllib2.urlopen(r_url)
+ res_json = json.load(res)
+
+ offset = 0
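+            # offset tracks how much the message has grown so far, so the
+            # spans recorded before any substitution still point correctly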
+ for item in res_json['items']:
+ v_id = item['id']
+ (start, end) = videos[v_id]['span']
+ end += 1 # remove endline
+
+ try:
+ new_url = '<a href="%(url)s" target="_blank" class="yt"><span class="pvw"><img src="%(thumb)s" /></span><b>%(title)s</b> (%(secs)s)<br />%(channel)s</a><br />' \
+ % {'title': item['snippet']['title'].encode('utf-8'),
+ 'channel': item['snippet']['channelTitle'].encode('utf-8'),
+ 'secs': parseIsoPeriod(item['contentDetails']['duration']).encode('utf-8'),
+ 'url': videos[v_id]['url'],
+ 'id': v_id.encode('utf-8'),
+ 'thumb': item['snippet']['thumbnails']['default']['url'].encode('utf-8'), }
+ except UnicodeDecodeError:
+ raise UserError, repr(v_id)
+ message = message[:start+offset] + new_url + message[end+offset:]
+ offset += len(new_url) - (end-start)
+
+ return (message, len(videos))
+
def fixMobileLinks(message):
- """
- Shorten long links; Convert >># links into a mobile version
- """
- board = Settings._.BOARD
-
- # If textboard
- if board["board_type"] == '1':
- message = re.compile(r'<a href="/(\w+)/read/(\d+)(\.html)?/*(.+)"').sub(r'<a href="/cgi/mobileread/\1/\2/\4"', message)
- else:
- message = re.compile(r'<a href="/(\w+)/res/(\d+)\.html#(\d+)"').sub(r'<a href="/cgi/mobileread/\1/\2#\3"', message)
-
- return message
-
+ """
+ Shorten long links; Convert >># links into a mobile version
+ """
+ board = Settings._.BOARD
+
+ # If textboard
+ if board["board_type"] == '1':
+ message = re.compile(r'<a href="/(\w+)/read/(\d+)(\.html)?/*(.+)"').sub(
+ r'<a href="/cgi/mobileread/\1/\2/\4"', message)
+ else:
+ message = re.compile(r'<a href="/(\w+)/res/(\d+)\.html#(\d+)"').sub(
+ r'<a href="/cgi/mobileread/\1/\2#\3"', message)
+
+ return message
+
+
def checkRefLinks(message, parentid, parent_timestamp):
- """
- Check for >># links in posts and replace with the HTML to make them clickable
- """
- board = Settings._.BOARD
-
- if board["board_type"] == '1':
- # Textboard
- if parentid != '0':
- message = re.compile(r'&gt;&gt;(\d+(,\d+|-(?=[ \d\n])|\d+)*n?)').sub('<a href="' + Settings.BOARDS_URL + board['dir'] + '/read/' + str(parent_timestamp) + r'/\1">&gt;&gt;\1</a>', message)
- else:
- # Imageboard
- quotes_id_array = re.findall(r"&gt;&gt;([0-9]+)", message)
- for quotes in quotes_id_array:
- try:
- post = FetchOne('SELECT * FROM `posts` WHERE `id` = ' + quotes + ' AND `boardid` = ' + board['id'] + ' LIMIT 1')
- if post['parentid'] != '0':
- message = re.compile("&gt;&gt;" + quotes).sub('<a href="' + Settings.BOARDS_URL + board['dir'] + '/res/' + post['parentid'] + '.html#' + quotes + '">&gt;&gt;' + quotes + '</a>', message)
- else:
- message = re.compile("&gt;&gt;" + quotes).sub('<a href="' + Settings.BOARDS_URL + board['dir'] + '/res/' + post['id'] + '.html#' + quotes + '">&gt;&gt;' + quotes + '</a>', message)
- except:
- message = re.compile("&gt;&gt;" + quotes).sub(r'<span class="q">&gt;&gt;'+quotes+'</span>', message)
-
- return message
+ """
+ Check for >># links in posts and replace with the HTML to make them clickable
+ """
+ board = Settings._.BOARD
+
+ if board["board_type"] == '1':
+ # Textboard
+ if parentid != '0':
+ message = re.compile(r'&gt;&gt;(\d+(,\d+|-(?=[ \d\n])|\d+)*n?)').sub(
+ '<a href="' + Settings.BOARDS_URL + board['dir'] + '/read/' + str(parent_timestamp) + r'/\1">&gt;&gt;\1</a>', message)
+ else:
+ # Imageboard
+ quotes_id_array = re.findall(r"&gt;&gt;([0-9]+)", message)
+ for quotes in quotes_id_array:
+ try:
+ post = FetchOne('SELECT * FROM `posts` WHERE `id` = ' +
+ quotes + ' AND `boardid` = ' + board['id'] + ' LIMIT 1')
+ if post['parentid'] != '0':
+ message = re.compile("&gt;&gt;" + quotes).sub('<a href="' + Settings.BOARDS_URL +
+ board['dir'] + '/res/' + post['parentid'] + '.html#' + quotes + '">&gt;&gt;' + quotes + '</a>', message)
+ else:
+ message = re.compile("&gt;&gt;" + quotes).sub('<a href="' + Settings.BOARDS_URL +
+ board['dir'] + '/res/' + post['id'] + '.html#' + quotes + '">&gt;&gt;' + quotes + '</a>', message)
+ except:
+ message = re.compile(
+ "&gt;&gt;" + quotes).sub(r'<span class="q">&gt;&gt;'+quotes+'</span>', message)
+
+ return message
+
def checkQuotes(message):
- """
- Check for >text in posts and add span around it to color according to the css
- """
- message = re.compile(r"^&gt;(.*)$", re.MULTILINE).sub(r'<span class="q">&gt;\1</span>', message)
- return message
+ """
+ Check for >text in posts and add span around it to color according to the css
+ """
+ message = re.compile(
+ r"^&gt;(.*)$", re.MULTILINE).sub(r'<span class="q">&gt;\1</span>', message)
+ return message
+
def escapeHTML(string):
- string = string.replace('<', '&lt;')
- string = string.replace('>', '&gt;')
- return string
+ string = string.replace('<', '&lt;')
+ string = string.replace('>', '&gt;')
+ return string
+
def onlyAllowedHTML(message):
- """
- Allow <b>, <i>, <u>, <strike>, and <pre> in posts, along with the special <aa>
- """
- message = sanitize_html(message)
- #message = re.compile(r"\[aa\](.+?)\[/aa\]", re.DOTALL | re.IGNORECASE).sub("<span class=\"sjis\">\\1</span>", message)
-
- return message
+ """
+ Allow <b>, <i>, <u>, <strike>, and <pre> in posts, along with the special <aa>
+ """
+ message = sanitize_html(message)
+ #message = re.compile(r"\[aa\](.+?)\[/aa\]", re.DOTALL | re.IGNORECASE).sub("<span class=\"sjis\">\\1</span>", message)
+
+ return message
+
def close_html(message):
- """
- Old retarded version of sanitize_html, it just closes open tags.
- """
- import BeautifulSoup
-
- message = message.encode('utf-8')
- soup = BeautifulSoup.BeautifulSoup(message)
+ """
+    Older, cruder version of sanitize_html; it just closes open tags.
+ """
+ import BeautifulSoup
+
+ message = message.encode('utf-8')
+ soup = BeautifulSoup.BeautifulSoup(message)
+
+ return unicode(soup).replace('&#13;', '').encode('utf-8')
- return unicode(soup).replace('&#13;', '').encode('utf-8')
def sanitize_html(message, decode=True):
- """
- Clean the code and allow only a few safe tags.
- """
- import BeautifulSoup
-
- # Decode message from utf-8 if required
- if decode:
- message = message.decode('utf-8', 'replace')
-
- # Create HTML Cleaner with our allowed tags
- whitelist_tags = ["a","b","br","blink","code","del","em","i","marquee","root","strike","strong","sub","sup","u"]
- whitelist_attr = ["href"]
-
- soup = BeautifulSoup.BeautifulSoup(message)
-
- # Remove tags that aren't allowed
- for tag in soup.findAll():
- if not tag.name.lower() in whitelist_tags:
- tag.name = "span"
- tag.attrs = []
- else:
- for attr in [attr for attr in tag.attrs if attr not in whitelist_attr]:
- del tag[attr]
+ """
+ Clean the code and allow only a few safe tags.
+ """
+ import BeautifulSoup
+
+ # Decode message from utf-8 if required
+ if decode:
+ message = message.decode('utf-8', 'replace')
+
+ # Create HTML Cleaner with our allowed tags
+ whitelist_tags = ["a", "b", "br", "blink", "code", "del", "em",
+ "i", "marquee", "root", "strike", "strong", "sub", "sup", "u"]
+ whitelist_attr = ["href"]
+
+ soup = BeautifulSoup.BeautifulSoup(message)
+
+ # Remove tags that aren't allowed
+ for tag in soup.findAll():
+ if not tag.name.lower() in whitelist_tags:
+ tag.name = "span"
+ tag.attrs = []
+ else:
+ for attr in [attr for attr in tag.attrs if attr not in whitelist_attr]:
+ del tag[attr]
+
+ # We export the soup into a correct XHTML string
+ string = unicode(soup).encode('utf-8')
+ # We remove some anomalies we don't want
+ string = string.replace('<br/>', '<br />').replace('&#13;', '')
+
+ return string
- # We export the soup into a correct XHTML string
- string = unicode(soup).encode('utf-8')
- # We remove some anomalies we don't want
- string = string.replace('<br/>', '<br />').replace('&#13;', '')
-
- return string
def markdown(message):
- import markdown
- if message.strip() != "":
- #return markdown.markdown(message).rstrip("\n").rstrip("<br />")
- return markdown.markdown(message, extras=["cuddled-lists", "code-friendly"]).encode('utf-8')
- else:
- return ""
+ import markdown
+ if message.strip() != "":
+ # return markdown.markdown(message).rstrip("\n").rstrip("<br />")
+ return markdown.markdown(message, extras=["cuddled-lists", "code-friendly"]).encode('utf-8')
+ else:
+ return ""
+
def checkWordfilters(message, ip, board):
- fixed_ip = inet_aton(ip)
- wordfilters = FetchAll("SELECT * FROM `filters` WHERE `type` = '0' ORDER BY `id` ASC")
- for wordfilter in wordfilters:
- if wordfilter["boards"] != "":
- boards = pickle.loads(wordfilter["boards"])
- if wordfilter["boards"] == "" or board in boards:
- if wordfilter['action'] == '0':
- if not re.search(wordfilter['from'], message, re.DOTALL | re.IGNORECASE) is None:
- raise UserError, wordfilter['reason']
- elif wordfilter['action'] == '1':
- message = re.compile(wordfilter['from'], re.DOTALL | re.IGNORECASE).sub(wordfilter['to'], message)
- elif wordfilter['action'] == '2':
- # Ban
- if not re.search(wordfilter['from'], message, re.DOTALL | re.IGNORECASE) is None:
- if wordfilter['seconds'] != '0':
- until = str(timestamp() + int(wordfilter['seconds']))
- else:
- until = '0'
-
- InsertDb("INSERT INTO `bans` (`ip`, `boards`, `added`, `until`, `staff`, `reason`, `note`, `blind`) VALUES (" + \
- "'" + str(fixed_ip) + "', '" + _mysql.escape_string(wordfilter['boards']) + \
- "', " + str(timestamp()) + ", " + until + ", 'System', '" + _mysql.escape_string(wordfilter['reason']) + \
- "', 'Word Auto-ban', '"+_mysql.escape_string(wordfilter['blind'])+"')")
- regenerateAccess()
- raise UserError, wordfilter['reason']
- elif wordfilter['action'] == '3':
- if not re.search(wordfilter['from'], message, re.DOTALL | re.IGNORECASE) is None:
- raise UserError, '<meta http-equiv="refresh" content="%s;url=%s" />%s' % (wordfilter['redirect_time'], wordfilter['redirect_url'], wordfilter['reason'])
- return message
-
+ fixed_ip = inet_aton(ip)
+ wordfilters = FetchAll(
+ "SELECT * FROM `filters` WHERE `type` = '0' ORDER BY `id` ASC")
+ for wordfilter in wordfilters:
+ if wordfilter["boards"] != "":
+ boards = pickle.loads(wordfilter["boards"])
+ if wordfilter["boards"] == "" or board in boards:
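+            # action: 0 = reject the post, 1 = replace the text,
+            # 2 = ban the poster, 3 = redirect with a message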
+ if wordfilter['action'] == '0':
+ if not re.search(wordfilter['from'], message, re.DOTALL | re.IGNORECASE) is None:
+ raise UserError, wordfilter['reason']
+ elif wordfilter['action'] == '1':
+ message = re.compile(wordfilter['from'], re.DOTALL | re.IGNORECASE).sub(
+ wordfilter['to'], message)
+ elif wordfilter['action'] == '2':
+ # Ban
+ if not re.search(wordfilter['from'], message, re.DOTALL | re.IGNORECASE) is None:
+ if wordfilter['seconds'] != '0':
+ until = str(timestamp() + int(wordfilter['seconds']))
+ else:
+ until = '0'
+
+ InsertDb("INSERT INTO `bans` (`ip`, `boards`, `added`, `until`, `staff`, `reason`, `note`, `blind`) VALUES (" +
+ "'" + str(fixed_ip) + "', '" + _mysql.escape_string(wordfilter['boards']) +
+ "', " + str(timestamp()) + ", " + until + ", 'System', '" + _mysql.escape_string(wordfilter['reason']) +
+ "', 'Word Auto-ban', '"+_mysql.escape_string(wordfilter['blind'])+"')")
+ regenerateAccess()
+ raise UserError, wordfilter['reason']
+ elif wordfilter['action'] == '3':
+ if not re.search(wordfilter['from'], message, re.DOTALL | re.IGNORECASE) is None:
+ raise UserError, '<meta http-equiv="refresh" content="%s;url=%s" />%s' % (wordfilter['redirect_time'], wordfilter['redirect_url'], wordfilter['reason'])
+ return message
+
+
def checkNamefilters(name, tripcode, ip, board):
- namefilters = FetchAll("SELECT * FROM `filters` WHERE `type` = '1'")
-
- for namefilter in namefilters:
- if namefilter["boards"] != "":
- boards = pickle.loads(namefilter["boards"])
- if namefilter["boards"] == "" or board in boards:
- # check if this filter applies
- match = False
-
- if namefilter['from'] and namefilter['from_trip']:
- # both name and trip filter
- if re.search(namefilter['from'], name, re.DOTALL | re.IGNORECASE) and tripcode == namefilter['from_trip']:
- match = True
- elif namefilter['from'] and not namefilter['from_trip']:
- # name filter
- if re.search(namefilter['from'], name, re.DOTALL | re.IGNORECASE):
- match = True
- elif not namefilter['from'] and namefilter['from_trip']:
- # trip filter
- if tripcode == namefilter['from_trip']:
- match = True
-
- if match:
- # do action
- if namefilter['action'] == '0':
- raise UserError, namefilter['reason']
- elif namefilter['action'] == '1':
- name = namefilter['to']
- tripcode = ''
- return name, tripcode
- elif namefilter['action'] == '2':
- # Ban
- if namefilter['seconds'] != '0':
- until = str(timestamp() + int(namefilter['seconds']))
- else:
- until = '0'
-
- InsertDb("INSERT INTO `bans` (`ip`, `boards`, `added`, `until`, `staff`, `reason`, `note`, `blind`) VALUES (" + \
- "'" + _mysql.escape_string(ip) + "', '" + _mysql.escape_string(namefilter['boards']) + \
- "', " + str(timestamp()) + ", " + until + ", 'System', '" + _mysql.escape_string(namefilter['reason']) + \
- "', 'Name Auto-ban', '"+_mysql.escape_string(namefilter['blind'])+"')")
- regenerateAccess()
- raise UserError, namefilter['reason']
- elif namefilter['action'] == '3':
- raise UserError, '<meta http-equiv="refresh" content="%s;url=%s" />%s' % (namefilter['redirect_time'], namefilter['redirect_url'], namefilter['reason'])
- return name, tripcode
+ namefilters = FetchAll("SELECT * FROM `filters` WHERE `type` = '1'")
+
+ for namefilter in namefilters:
+ if namefilter["boards"] != "":
+ boards = pickle.loads(namefilter["boards"])
+ if namefilter["boards"] == "" or board in boards:
+ # check if this filter applies
+ match = False
+
+ if namefilter['from'] and namefilter['from_trip']:
+ # both name and trip filter
+ if re.search(namefilter['from'], name, re.DOTALL | re.IGNORECASE) and tripcode == namefilter['from_trip']:
+ match = True
+ elif namefilter['from'] and not namefilter['from_trip']:
+ # name filter
+ if re.search(namefilter['from'], name, re.DOTALL | re.IGNORECASE):
+ match = True
+ elif not namefilter['from'] and namefilter['from_trip']:
+ # trip filter
+ if tripcode == namefilter['from_trip']:
+ match = True
+
+ if match:
+ # do action
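+                    # 0 = reject, 1 = replace name and drop the trip,
+                    # 2 = ban the poster, 3 = redirect with a message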
+ if namefilter['action'] == '0':
+ raise UserError, namefilter['reason']
+ elif namefilter['action'] == '1':
+ name = namefilter['to']
+ tripcode = ''
+ return name, tripcode
+ elif namefilter['action'] == '2':
+ # Ban
+ if namefilter['seconds'] != '0':
+ until = str(timestamp() + int(namefilter['seconds']))
+ else:
+ until = '0'
+
+ InsertDb("INSERT INTO `bans` (`ip`, `boards`, `added`, `until`, `staff`, `reason`, `note`, `blind`) VALUES (" +
+ "'" + _mysql.escape_string(ip) + "', '" + _mysql.escape_string(namefilter['boards']) +
+ "', " + str(timestamp()) + ", " + until + ", 'System', '" + _mysql.escape_string(namefilter['reason']) +
+ "', 'Name Auto-ban', '"+_mysql.escape_string(namefilter['blind'])+"')")
+ regenerateAccess()
+ raise UserError, namefilter['reason']
+ elif namefilter['action'] == '3':
+ raise UserError, '<meta http-equiv="refresh" content="%s;url=%s" />%s' % (namefilter['redirect_time'], namefilter['redirect_url'], namefilter['reason'])
+ return name, tripcode
diff --git a/cgi/framework.py b/cgi/framework.py
index f90ed92..cc7a065 100644
--- a/cgi/framework.py
+++ b/cgi/framework.py
@@ -14,453 +14,508 @@ from Cookie import SimpleCookie
from settings import Settings
from database import *
+
def setBoard(dir):
- """
- Sets the board which the script is operating on by filling Settings._.BOARD
- with the data from the db.
- """
- if not dir:
- raise UserError, _("The specified board is invalid.")
- logTime("Seteando el board " + dir)
- board = FetchOne("SELECT * FROM `boards` WHERE `dir` = '%s' LIMIT 1" % _mysql.escape_string(dir))
- if not board:
- raise UserError, _("The specified board is invalid.")
-
- board["filetypes"] = FetchAll("SELECT * FROM `boards_filetypes` INNER JOIN `filetypes` ON filetypes.id = boards_filetypes.filetypeid WHERE `boardid` = %s ORDER BY `ext` ASC" % _mysql.escape_string(board['id']))
- board["filetypes_ext"] = [filetype['ext'] for filetype in board['filetypes']]
- logTime("Board seteado.")
-
- Settings._.BOARD = board
-
- return board
+ """
+ Sets the board which the script is operating on by filling Settings._.BOARD
+ with the data from the db.
+ """
+ if not dir:
+ raise UserError, _("The specified board is invalid.")
+ logTime("Seteando el board " + dir)
+ board = FetchOne(
+ "SELECT * FROM `boards` WHERE `dir` = '%s' LIMIT 1" % _mysql.escape_string(dir))
+ if not board:
+ raise UserError, _("The specified board is invalid.")
+
+ board["filetypes"] = FetchAll(
+ "SELECT * FROM `boards_filetypes` INNER JOIN `filetypes` ON filetypes.id = boards_filetypes.filetypeid WHERE `boardid` = %s ORDER BY `ext` ASC" % _mysql.escape_string(board['id']))
+ board["filetypes_ext"] = [filetype['ext']
+ for filetype in board['filetypes']]
+ logTime("Board seteado.")
+
+ Settings._.BOARD = board
+
+ return board
+
def cleanDir(path, ext=None):
- if ext:
- filelist = [ f for f in os.listdir(path) if f.endswith("." + ext) ]
- else:
- filelist = os.listdir(path)
-
- for f in filelist:
- os.remove(os.path.join(path, f))
+ if ext:
+ filelist = [f for f in os.listdir(path) if f.endswith("." + ext)]
+ else:
+ filelist = os.listdir(path)
+
+ for f in filelist:
+ os.remove(os.path.join(path, f))
+
def addressIsBanned(ip, board):
- packed_ip = inet_aton(ip)
- bans = FetchAll("SELECT * FROM `bans` WHERE (`netmask` IS NULL AND `ip` = '"+str(packed_ip)+"') OR (`netmask` IS NOT NULL AND '"+str(packed_ip)+"' & `netmask` = `ip`)")
- logTime("SELECT * FROM `bans` WHERE (`netmask` IS NULL AND `ip` = '"+str(packed_ip)+"') OR (`netmask` IS NOT NULL AND '"+str(packed_ip)+"' & `netmask` = `ip`)")
- for ban in bans:
- if ban["boards"] != "":
- boards = pickle.loads(ban["boards"])
- if ban["boards"] == "" or board in boards:
- if board not in Settings.EXCLUDE_GLOBAL_BANS:
- return True
- return False
+ packed_ip = inet_aton(ip)
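+    # a ban row matches either an exact IP (netmask NULL) or a whole
+    # range (packed_ip & netmask == stored ip)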
+ bans = FetchAll("SELECT * FROM `bans` WHERE (`netmask` IS NULL AND `ip` = '"+str(
+ packed_ip)+"') OR (`netmask` IS NOT NULL AND '"+str(packed_ip)+"' & `netmask` = `ip`)")
+ logTime("SELECT * FROM `bans` WHERE (`netmask` IS NULL AND `ip` = '"+str(packed_ip) +
+ "') OR (`netmask` IS NOT NULL AND '"+str(packed_ip)+"' & `netmask` = `ip`)")
+ for ban in bans:
+ if ban["boards"] != "":
+ boards = pickle.loads(ban["boards"])
+ if ban["boards"] == "" or board in boards:
+ if board not in Settings.EXCLUDE_GLOBAL_BANS:
+ return True
+ return False
+
def addressIsTor(ip):
- if Settings._.IS_TOR is None:
- res = False
- nodes = []
- if ip == '127.0.0.1': # Tor proxy address
- res = True
+ if Settings._.IS_TOR is None:
+ res = False
+ nodes = []
+ if ip == '127.0.0.1': # Tor proxy address
+ res = True
+ else:
+ with open('tor.txt') as f:
+ nodes = [line.rstrip() for line in f]
+ if ip in nodes:
+ res = True
+ Settings._.IS_TOR = res
+ return res
+    else:
- with open('tor.txt') as f:
- nodes = [line.rstrip() for line in f]
- if ip in nodes:
- res = True
- Settings._.IS_TOR = res
- return res
- else:
- return Settings._.IS_TOR
+ return Settings._.IS_TOR
+
def addressIsProxy(ip):
- if Settings._.IS_PROXY is None:
- res = False
- proxies = []
- with open('proxy.txt') as f:
- proxies = [line.rstrip() for line in f]
- if ip in proxies:
- res = True
- Settings._.IS_PROXY = res
- return res
- else:
- return Settings._.IS_PROXY
-
+ if Settings._.IS_PROXY is None:
+ res = False
+ proxies = []
+ with open('proxy.txt') as f:
+ proxies = [line.rstrip() for line in f]
+ if ip in proxies:
+ res = True
+ Settings._.IS_PROXY = res
+ return res
+ else:
+ return Settings._.IS_PROXY
+
+
def addressIsES(ip):
- ES = ['AR', 'BO', 'CL', 'CO', 'CR', 'CU', 'EC', 'ES', 'GF',
- 'GY', 'GT', 'HN', 'MX', 'NI', 'PA', 'PE', 'PY', 'PR', 'SR', 'UY', 'VE'] # 'BR',
- return getCountry(ip) in ES
+ ES = ['AR', 'BO', 'CL', 'CO', 'CR', 'CU', 'EC', 'ES', 'GF',
+ 'GY', 'GT', 'HN', 'MX', 'NI', 'PA', 'PE', 'PY', 'PR', 'SR', 'UY', 'VE'] # 'BR',
+ return getCountry(ip) in ES
+
def addressIsUS(ip):
- return getCountry(ip) == 'US'
+ return getCountry(ip) == 'US'
+
def getCountry(ip):
- import geoip
- return geoip.country(ip)
-
+ import geoip
+ return geoip.country(ip)
+
+
def getHost(ip):
- if Settings._.HOST is None:
- try:
- Settings._.HOST = socket.gethostbyaddr(ip)[0]
- return Settings._.HOST
- except socket.herror:
- return None
- else:
- return Settings._.HOST
-
+ if Settings._.HOST is None:
+ try:
+ Settings._.HOST = socket.gethostbyaddr(ip)[0]
+ return Settings._.HOST
+ except socket.herror:
+ return None
+ else:
+ return Settings._.HOST
+
+
def hostIsBanned(ip):
- host = getHost(ip)
- if host:
- banned_hosts = []
- for banned_host in banned_hosts:
- if host.endswith(banned_host):
- return True
- return False
- else:
- return False
-
+ host = getHost(ip)
+ if host:
+ banned_hosts = []
+ for banned_host in banned_hosts:
+ if host.endswith(banned_host):
+ return True
+ return False
+ else:
+ return False
+
+
def updateBoardSettings():
- """
- Pickle the board's settings and store it in the configuration field
- """
- board = Settings._.BOARD
- #UpdateDb("UPDATE `boards` SET `configuration` = '%s' WHERE `id` = %s LIMIT 1" % (_mysql.escape_string(configuration), board["id"]))
-
- del board["filetypes"]
- del board["filetypes_ext"]
- post_values = ["`" + _mysql.escape_string(str(key)) + "` = '" + _mysql.escape_string(str(value)) + "'" for key, value in board.iteritems()]
-
- UpdateDb("UPDATE `boards` SET %s WHERE `id` = '%s' LIMIT 1" % (", ".join(post_values), board["id"]))
+ """
+ Pickle the board's settings and store it in the configuration field
+ """
+ board = Settings._.BOARD
+ #UpdateDb("UPDATE `boards` SET `configuration` = '%s' WHERE `id` = %s LIMIT 1" % (_mysql.escape_string(configuration), board["id"]))
+
+ del board["filetypes"]
+ del board["filetypes_ext"]
+ post_values = ["`" + _mysql.escape_string(str(key)) + "` = '" + _mysql.escape_string(
+ str(value)) + "'" for key, value in board.iteritems()]
+
+ UpdateDb("UPDATE `boards` SET %s WHERE `id` = '%s' LIMIT 1" %
+ (", ".join(post_values), board["id"]))
+
def timestamp(t=None):
- """
- Create MySQL-safe timestamp from the datetime t if provided, otherwise create
- the timestamp from datetime.now()
- """
- if not t:
- t = datetime.datetime.now()
- return int(time.mktime(t.timetuple()))
+ """
+ Create MySQL-safe timestamp from the datetime t if provided, otherwise create
+ the timestamp from datetime.now()
+ """
+ if not t:
+ t = datetime.datetime.now()
+ return int(time.mktime(t.timetuple()))
+
def formatDate(t=None, home=False):
- """
- Format a datetime to a readable date
- """
- if not t:
- t = datetime.datetime.now()
-
- days = {'en': ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'],
- 'es': ['lun', 'mar', 'mie', 'jue', 'vie', 'sab', 'dom'],
- 'jp': ['月', '火', '水', '木', '金', '土', '日']}
-
- daylist = days[Settings.LANG]
- format = "%d/%m/%y(%a)%H:%M:%S"
-
- if not home:
- try:
- board = Settings._.BOARD
- if board["dir"] == 'world':
- daylist = days['en']
- elif board["dir"] == '2d':
- daylist = days['jp']
- except:
- pass
-
- t = t.strftime(format)
-
- t = re.compile(r"mon", re.DOTALL | re.IGNORECASE).sub(daylist[0], t)
- t = re.compile(r"tue", re.DOTALL | re.IGNORECASE).sub(daylist[1], t)
- t = re.compile(r"wed", re.DOTALL | re.IGNORECASE).sub(daylist[2], t)
- t = re.compile(r"thu", re.DOTALL | re.IGNORECASE).sub(daylist[3], t)
- t = re.compile(r"fri", re.DOTALL | re.IGNORECASE).sub(daylist[4], t)
- t = re.compile(r"sat", re.DOTALL | re.IGNORECASE).sub(daylist[5], t)
- t = re.compile(r"sun", re.DOTALL | re.IGNORECASE).sub(daylist[6], t)
- return t
+ """
+ Format a datetime to a readable date
+ """
+ if not t:
+ t = datetime.datetime.now()
+
+ days = {'en': ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'],
+ 'es': ['lun', 'mar', 'mie', 'jue', 'vie', 'sab', 'dom'],
+ 'jp': ['月', '火', '水', '木', '金', '土', '日']}
+
+ daylist = days[Settings.LANG]
+ format = "%d/%m/%y(%a)%H:%M:%S"
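+    # strftime emits English weekday abbreviations; they are swapped below for
+    # the board's language (English on /world/, Japanese on /2d/)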
+
+ if not home:
+ try:
+ board = Settings._.BOARD
+ if board["dir"] == 'world':
+ daylist = days['en']
+ elif board["dir"] == '2d':
+ daylist = days['jp']
+ except:
+ pass
+
+ t = t.strftime(format)
+
+ t = re.compile(r"mon", re.DOTALL | re.IGNORECASE).sub(daylist[0], t)
+ t = re.compile(r"tue", re.DOTALL | re.IGNORECASE).sub(daylist[1], t)
+ t = re.compile(r"wed", re.DOTALL | re.IGNORECASE).sub(daylist[2], t)
+ t = re.compile(r"thu", re.DOTALL | re.IGNORECASE).sub(daylist[3], t)
+ t = re.compile(r"fri", re.DOTALL | re.IGNORECASE).sub(daylist[4], t)
+ t = re.compile(r"sat", re.DOTALL | re.IGNORECASE).sub(daylist[5], t)
+ t = re.compile(r"sun", re.DOTALL | re.IGNORECASE).sub(daylist[6], t)
+ return t
+
def formatTimestamp(t, home=False):
- """
- Format a timestamp to a readable date
- """
- return formatDate(datetime.datetime.fromtimestamp(int(t)), home)
+ """
+ Format a timestamp to a readable date
+ """
+ return formatDate(datetime.datetime.fromtimestamp(int(t)), home)
+
def timeTaken(time_start, time_finish):
- return str(round(time_finish - time_start, 3))
+ return str(round(time_finish - time_start, 3))
+
def parseIsoPeriod(t_str):
- m = re.match('P(?:(\d+)D)?T(?:(\d+)H)?(?:(\d+)M)?(\d+)S', t_str)
- if m:
- grps = [x for x in m.groups() if x]
- if len(grps) == 1:
- grps.insert(0, '0')
- grps[-1] = grps[-1].zfill(2)
- return ':'.join(grps)
- else:
- return '???'
-
+ m = re.match('P(?:(\d+)D)?T(?:(\d+)H)?(?:(\d+)M)?(\d+)S', t_str)
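+    # turns an ISO 8601 duration such as "PT3M25S" into "3:25"; a bare
+    # seconds value like "PT7S" is padded so it reads "0:07"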
+ if m:
+ grps = [x for x in m.groups() if x]
+ if len(grps) == 1:
+ grps.insert(0, '0')
+ grps[-1] = grps[-1].zfill(2)
+ return ':'.join(grps)
+ else:
+ return '???'
+
+
def getFormData(self):
- """
- Process input sent to WSGI through a POST method and output it in an easy to
- retrieve format: dictionary of dictionaries in the format of {key: value}
- """
- # This must be done to avoid a bug in cgi.FieldStorage
- self.environ.setdefault("QUERY_STRING", "")
- if self.environ["QUERY_STRING"] == "rawpost":
- return None
- wsgi_input = self.environ["wsgi.input"]
- post_form = self.environ.get("wsgi.post_form")
- if (post_form is not None
- and post_form[0] is wsgi_input):
- return post_form[2]
- fs = cgi.FieldStorage(fp=wsgi_input,
- environ=self.environ,
- keep_blank_values=1)
- new_input = InputProcessed()
- post_form = (new_input, wsgi_input, fs)
- self.environ["wsgi.post_form"] = post_form
- self.environ["wsgi.input"] = new_input
-
- try:
- formdata = {}
- for key in dict(fs):
- try:
- formdata.update({key: fs[key].value})
- if key == "file":
- formdata.update({"file_original": secure_filename(fs[key].filename)})
- except AttributeError:
- formdata.update({key: fs[key]})
-
- return formdata
- except TypeError:
- return fs
+ """
+ Process input sent to WSGI through a POST method and output it in an easy to
+    retrieve format: a dictionary in the format of {key: value}
+ """
+ # This must be done to avoid a bug in cgi.FieldStorage
+ self.environ.setdefault("QUERY_STRING", "")
+ if self.environ["QUERY_STRING"] == "rawpost":
+ return None
+ wsgi_input = self.environ["wsgi.input"]
+ post_form = self.environ.get("wsgi.post_form")
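+    # reuse a previously parsed form for this same input stream instead of
+    # consuming wsgi.input a second time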
+ if (post_form is not None
+ and post_form[0] is wsgi_input):
+ return post_form[2]
+ fs = cgi.FieldStorage(fp=wsgi_input,
+ environ=self.environ,
+ keep_blank_values=1)
+ new_input = InputProcessed()
+ post_form = (new_input, wsgi_input, fs)
+ self.environ["wsgi.post_form"] = post_form
+ self.environ["wsgi.input"] = new_input
+
+ try:
+ formdata = {}
+ for key in dict(fs):
+ try:
+ formdata.update({key: fs[key].value})
+ if key == "file":
+ formdata.update(
+ {"file_original": secure_filename(fs[key].filename)})
+ except AttributeError:
+ formdata.update({key: fs[key]})
+
+ return formdata
+ except TypeError:
+ return fs
+
+
class InputProcessed(object):
- def read(self):
- raise EOFError("El stream de wsgi.input ya se ha consumido.")
- readline = readlines = __iter__ = read
+ def read(self):
+ raise EOFError("El stream de wsgi.input ya se ha consumido.")
+ readline = readlines = __iter__ = read
+
class UserError(Exception):
- pass
+ pass
+
def secure_filename(path):
- split = re.compile(r'[\0%s]' % re.escape(''.join([os.path.sep, os.path.altsep or ''])))
- return cgi.escape(split.sub('', path))
-
+ split = re.compile(r'[\0%s]' % re.escape(
+ ''.join([os.path.sep, os.path.altsep or ''])))
+ return cgi.escape(split.sub('', path))
+
+
def getMD5(data):
- m = hashlib.md5()
- m.update(data)
-
- return m.hexdigest()
+ m = hashlib.md5()
+ m.update(data)
+
+ return m.hexdigest()
+
def nullstr(len): return "\0" * len
+
def hide_data(data, length, key, secret):
- """
- Encrypts data, useful for tripcodes and IDs
- """
- crypt = rc4(nullstr(length), rc4(nullstr(32), key + secret) + data).encode('base64')
- return crypt.rstrip('\n')
+ """
+ Encrypts data, useful for tripcodes and IDs
+ """
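+    # RC4-encrypts `length` zero bytes with a key derived from the secret and
+    # the data, then base64s the result into a short printable token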
+ crypt = rc4(nullstr(length), rc4(
+ nullstr(32), key + secret) + data).encode('base64')
+ return crypt.rstrip('\n')
+
def rc4(data, key):
- """
- rc4 implementation
- """
- x = 0
- box = range(256)
- for i in range(256):
- x = (x + box[i] + ord(key[i % len(key)])) % 256
- box[i], box[x] = box[x], box[i]
- x = 0
- y = 0
- out = []
- for char in data:
- x = (x + 1) % 256
- y = (y + box[x]) % 256
- box[x], box[y] = box[y], box[x]
- out.append(chr(ord(char) ^ box[(box[x] + box[y]) % 256]))
-
- return ''.join(out)
+ """
+ rc4 implementation
+ """
+ x = 0
+ box = range(256)
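+    # key-scheduling: permute the 256-entry state box using the key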
+ for i in range(256):
+ x = (x + box[i] + ord(key[i % len(key)])) % 256
+ box[i], box[x] = box[x], box[i]
+ x = 0
+ y = 0
+ out = []
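+    # keystream generation: XOR every input byte with the next keystream byte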
+ for char in data:
+ x = (x + 1) % 256
+ y = (y + box[x]) % 256
+ box[x], box[y] = box[y], box[x]
+ out.append(chr(ord(char) ^ box[(box[x] + box[y]) % 256]))
+
+ return ''.join(out)
+
def getRandomLine(filename):
- import random
- f = open(filename, 'r')
- lines = f.readlines()
- num = random.randint(0, len(lines) - 1)
- return lines[num]
-
+ import random
+ f = open(filename, 'r')
+ lines = f.readlines()
+ num = random.randint(0, len(lines) - 1)
+ return lines[num]
+
+
def getRandomIco():
- from glob import glob
- from random import choice
- icons = glob("../static/ico/*")
- if icons:
- return choice(icons).lstrip('..')
- else:
- return ''
+ from glob import glob
+ from random import choice
+ icons = glob("../static/ico/*")
+ if icons:
+ return choice(icons).lstrip('..')
+ else:
+ return ''
+
def N_(message): return message
+
def getCookie(self, value=""):
- try:
- return urllib.unquote_plus(self._cookies[value].value)
- except KeyError:
- return None
+ try:
+ return urllib.unquote_plus(self._cookies[value].value)
+ except KeyError:
+ return None
+
def reCookie(self, key, value=""):
- board = Settings._.BOARD
- setCookie(self, key, value)
+ board = Settings._.BOARD
+ setCookie(self, key, value)
+
def setCookie(self, key, value="", max_age=None, expires=None, path="/", domain=None, secure=None):
- """
- Copied from Colubrid
- """
- if self._newcookies is None:
- self._newcookies = SimpleCookie()
- self._newcookies[key] = urllib.quote_plus(value)
- if not max_age is None:
- self._newcookies[key]["max-age"] = max_age
- if not expires is None:
- if isinstance(expires, basestring):
- self._newcookies[key]["expires"] = expires
- expires = None
- elif isinstance(expires, datetime):
- expires = expires.utctimetuple()
- elif not isinstance(expires, (int, long)):
- expires = datetime.datetime.gmtime(expires)
- else:
- raise ValueError("Se requiere de un entero o un datetime")
+ """
+ Copied from Colubrid
+ """
+ if self._newcookies is None:
+ self._newcookies = SimpleCookie()
+ self._newcookies[key] = urllib.quote_plus(value)
+ if not max_age is None:
+ self._newcookies[key]["max-age"] = max_age
-	if not expires is None:
- now = datetime.datetime.gmtime()
- month = _([N_("Jan"), N_("Feb"), N_("Mar"), N_("Apr"), N_("May"), N_("Jun"), N_("Jul"),
- N_("Aug"), N_("Sep"), N_("Oct"), N_("Nov"), N_("Dec")][now.tm_mon - 1])
- day = _([N_("Monday"), N_("Tuesday"), N_("Wednesday"), N_("Thursday"),
- N_("Friday"), N_("Saturday"), N_("Sunday")][expires.tm_wday])
- date = "%02d-%s-%s" % (
- now.tm_mday, month, str(now.tm_year)[-2:]
- )
- d = "%s, %s %02d:%02d:%02d GMT" % (day, date, now.tm_hour,
- now.tm_min, now.tm_sec)
- self._newcookies[key]["expires"] = d
- if not path is None:
- self._newcookies[key]["path"] = path
- if not domain is None:
- if domain != "THIS":
- self._newcookies[key]["domain"] = domain
- if not secure is None:
- self._newcookies[key]["secure"] = secure
+ if isinstance(expires, basestring):
+ self._newcookies[key]["expires"] = expires
+ expires = None
+ elif isinstance(expires, datetime):
+ expires = expires.utctimetuple()
+ elif not isinstance(expires, (int, long)):
+ expires = datetime.datetime.gmtime(expires)
+ else:
+ raise ValueError("Se requiere de un entero o un datetime")
+ if not expires is None:
+ now = datetime.datetime.gmtime()
+ month = _([N_("Jan"), N_("Feb"), N_("Mar"), N_("Apr"), N_("May"), N_("Jun"), N_("Jul"),
+ N_("Aug"), N_("Sep"), N_("Oct"), N_("Nov"), N_("Dec")][now.tm_mon - 1])
+ day = _([N_("Monday"), N_("Tuesday"), N_("Wednesday"), N_("Thursday"),
+ N_("Friday"), N_("Saturday"), N_("Sunday")][expires.tm_wday])
+ date = "%02d-%s-%s" % (
+ now.tm_mday, month, str(now.tm_year)[-2:]
+ )
+ d = "%s, %s %02d:%02d:%02d GMT" % (day, date, now.tm_hour,
+ now.tm_min, now.tm_sec)
+ self._newcookies[key]["expires"] = d
+ if not path is None:
+ self._newcookies[key]["path"] = path
+ if not domain is None:
+ if domain != "THIS":
+ self._newcookies[key]["domain"] = domain
+ if not secure is None:
+ self._newcookies[key]["secure"] = secure
+
def deleteCookie(self, key):
- """
- Copied from Colubrid
- """
- if key not in self._cookies:
- return # Cookie doesn't exist
- if self._newcookies is None:
- self._newcookies = SimpleCookie()
- self._newcookies[key] = ""
- if self._cookies[key]["path"]:
- self._newcookies[key]["path"] = self._cookies[key]["path"]
- else:
- self._newcookies[key]["path"] = "/"
- self._newcookies[key]["domain"] = self._cookies[key]["domain"]
- self._newcookies[key]["expires"] = "Thu, 01 Jan 1970 00:00:00 GMT"
-
-def elapsed_time(seconds, suffixes=['y','w','d','h','m','s'], add_s=False, separator=' '):
- """
- Takes an amount of seconds and turns it into a human-readable amount of time.
- """
- # the formatted time string to be returned
- time = []
-
- # the pieces of time to iterate over (days, hours, minutes, etc)
- # - the first piece in each tuple is the suffix (d, h, w)
- # - the second piece is the length in seconds (a day is 60s * 60m * 24h)
- parts = [(suffixes[0], 60 * 60 * 24 * 7 * 52),
- (suffixes[1], 60 * 60 * 24 * 7),
- (suffixes[2], 60 * 60 * 24),
- (suffixes[3], 60 * 60),
- (suffixes[4], 60),
- (suffixes[5], 1)]
-
- # for each time piece, grab the value and remaining seconds, and add it to
- # the time string
- for suffix, length in parts:
- value = seconds / length
- if value > 0:
- seconds = seconds % length
- time.append('%s%s' % (str(value),
- (suffix, (suffix, suffix + 's')[value > 1])[add_s]))
- if seconds < 1:
- break
-
- return separator.join(time)
+ """
+ Copied from Colubrid
+ """
+ if key not in self._cookies:
+ return # Cookie doesn't exist
+ if self._newcookies is None:
+ self._newcookies = SimpleCookie()
+ self._newcookies[key] = ""
+ if self._cookies[key]["path"]:
+ self._newcookies[key]["path"] = self._cookies[key]["path"]
+ else:
+ self._newcookies[key]["path"] = "/"
+ self._newcookies[key]["domain"] = self._cookies[key]["domain"]
+ self._newcookies[key]["expires"] = "Thu, 01 Jan 1970 00:00:00 GMT"
+
+
+def elapsed_time(seconds, suffixes=['y', 'w', 'd', 'h', 'm', 's'], add_s=False, separator=' '):
+ """
+ Takes an amount of seconds and turns it into a human-readable amount of time.
+ """
+ # the formatted time string to be returned
+ time = []
+
+ # the pieces of time to iterate over (days, hours, minutes, etc)
+ # - the first piece in each tuple is the suffix (d, h, w)
+ # - the second piece is the length in seconds (a day is 60s * 60m * 24h)
+ parts = [(suffixes[0], 60 * 60 * 24 * 7 * 52),
+ (suffixes[1], 60 * 60 * 24 * 7),
+ (suffixes[2], 60 * 60 * 24),
+ (suffixes[3], 60 * 60),
+ (suffixes[4], 60),
+ (suffixes[5], 1)]
+
+ # for each time piece, grab the value and remaining seconds, and add it to
+ # the time string
+ for suffix, length in parts:
+ value = seconds / length
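+        # integer division gives the whole units; the remainder is carried
+        # into the next, smaller unit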
+ if value > 0:
+ seconds = seconds % length
+ time.append('%s%s' % (str(value),
+ (suffix, (suffix, suffix + 's')[value > 1])[add_s]))
+ if seconds < 1:
+ break
+
+ return separator.join(time)
+
def inet_aton(ip_string):
- import socket, struct
- return struct.unpack('!L',socket.inet_aton(ip_string))[0]
+ import socket
+ import struct
+ return struct.unpack('!L', socket.inet_aton(ip_string))[0]
+
def inet_ntoa(packed_ip):
- import socket, struct
- return socket.inet_ntoa(struct.pack('!L',packed_ip))
+ import socket
+ import struct
+ return socket.inet_ntoa(struct.pack('!L', packed_ip))
+
def is_bad_proxy(pip):
- import urllib2
- import socket
- socket.setdefaulttimeout(3)
-
- try:
- proxy_handler = urllib2.ProxyHandler({'http': pip})
- opener = urllib2.build_opener(proxy_handler)
- opener.addheaders = [('User-agent', 'Mozilla/5.0')]
- urllib2.install_opener(opener)
- req=urllib2.Request('http://bienvenidoainternet.org')
- sock=urllib2.urlopen(req)
- except urllib2.HTTPError, e:
- return e.code
- except Exception, detail:
- return True
- return False
+ import urllib2
+ import socket
+ socket.setdefaulttimeout(3)
+
+ try:
+ proxy_handler = urllib2.ProxyHandler({'http': pip})
+ opener = urllib2.build_opener(proxy_handler)
+ opener.addheaders = [('User-agent', 'Mozilla/5.0')]
+ urllib2.install_opener(opener)
+ req = urllib2.Request('http://bienvenidoainternet.org')
+ sock = urllib2.urlopen(req)
+ except urllib2.HTTPError, e:
+ return e.code
+ except Exception, detail:
+ return True
+ return False
+
def send_mail(subject, srcmsg):
- import smtplib
- from email.mime.text import MIMEText
-
- msg = MIMEText(srcmsg)
- me = 'weabot@bienvenidoainternet.org'
- you = 'burocracia@bienvenidoainternet.org'
-
- msg['Subject'] = 'The contents of %s' % textfile
- msg['From'] = me
- msg['To'] = you
-
- s = smtplib.SMTP('localhost')
- s.sendmail(me, [you], msg.as_string())
- s.quit()
+ import smtplib
+ from email.mime.text import MIMEText
+
+ msg = MIMEText(srcmsg)
+ me = 'weabot@bienvenidoainternet.org'
+ you = 'burocracia@bienvenidoainternet.org'
+
+    msg['Subject'] = subject
+ msg['From'] = me
+ msg['To'] = you
+
+ s = smtplib.SMTP('localhost')
+ s.sendmail(me, [you], msg.as_string())
+ s.quit()
+
class weabotLogger:
- def __init__(self):
- self.times = []
-
- def log(self, message):
- self.times.append([time.time(), message])
-
- def allTimes(self):
- output = "Time Logged action\n--------------------------\n"
- start = self.times[0][0]
- for time in self.times:
- difference = str(time[0] - start)
- difference_split = difference.split(".")
- if len(difference_split[0]) < 2:
- difference_split[0] = "0" + difference_split[0]
-
- if len(difference_split[1]) < 7:
- difference_split[1] = ("0" * (7 - len(difference_split[1]))) + difference_split[1]
- elif len(difference_split[1]) > 7:
- difference_split[1] = difference_split[1][:7]
-
- output += ".".join(difference_split) + " " + time[1] + "\n"
-
- return output
+ def __init__(self):
+ self.times = []
+
+ def log(self, message):
+ self.times.append([time.time(), message])
+
+ def allTimes(self):
+ output = "Time Logged action\n--------------------------\n"
+ start = self.times[0][0]
+ for time in self.times:
+ difference = str(time[0] - start)
+ difference_split = difference.split(".")
+ if len(difference_split[0]) < 2:
+ difference_split[0] = "0" + difference_split[0]
+
+ if len(difference_split[1]) < 7:
+ difference_split[1] = (
+ "0" * (7 - len(difference_split[1]))) + difference_split[1]
+ elif len(difference_split[1]) > 7:
+ difference_split[1] = difference_split[1][:7]
+
+ output += ".".join(difference_split) + " " + time[1] + "\n"
+
+ return output
+
logger = weabotLogger()
+
+
def logTime(message):
- global logger
- logger.log(message)
+ global logger
+ logger.log(message)
+
def logTimes():
- global logger
- return logger.allTimes()
+ global logger
+ return logger.allTimes()
diff --git a/cgi/geoip.py b/cgi/geoip.py
index 0bcb3d8..36c25d1 100644
--- a/cgi/geoip.py
+++ b/cgi/geoip.py
@@ -41,6 +41,7 @@ countries = (
'VU', 'WF', 'WS', 'YE', 'YT', 'RS', 'ZA', 'ZM', 'ME', 'ZW', 'A1', 'A2', 'O1',
'AX', 'GG', 'IM', 'JE', 'BL', 'MF')
+
def iptonum(ip):
"""Convert IP address string to 32-bit integer, or return None if IP is bad.
@@ -70,9 +71,11 @@ def iptonum(ip):
num = num << 8 | segment
return num
+
class DatabaseError(Exception):
pass
+
class GeoIP(object):
"""Wraps GeoIP country database lookup into a class."""
@@ -119,10 +122,12 @@ class GeoIP(object):
offset = x[i]
raise DatabaseError('GeoIP database corrupt: offset=%s' % offset)
+
def country(ip, dbname='GeoIP.dat'):
"""Helper function that creates a GeoIP instance and calls country()."""
return GeoIP(dbname).country(ip)
+
if __name__ == '__main__':
import doctest
doctest.testmod()
diff --git a/cgi/img.py b/cgi/img.py
index 3561672..ef64ac0 100644
--- a/cgi/img.py
+++ b/cgi/img.py
@@ -10,411 +10,437 @@ from settings import Settings
from database import *
from framework import *
-try: # Windows needs stdio set for binary mode.
- import msvcrt
- msvcrt.setmode (0, os.O_BINARY) # stdin = 0
- msvcrt.setmode (1, os.O_BINARY) # stdout = 1
+try: # Windows needs stdio set for binary mode.
+ import msvcrt
+ msvcrt.setmode(0, os.O_BINARY) # stdin = 0
+ msvcrt.setmode(1, os.O_BINARY) # stdout = 1
except ImportError:
- pass
+ pass
-def processImage(post, data, t, originalname, spoiler=False):
- """
- Take all post data from <post>, process uploaded file in <data>, and calculate
- file names using datetime <t>
- Returns updated <post> with file and thumb values
- """
- board = Settings._.BOARD
-
- used_filetype = None
-
- # get image information
- content_type, width, height, size, extra = getImageInfo(data)
-
- # check the size is fine
- if size > int(board["maxsize"])*1024:
- raise UserError, _("File too big. The maximum file size is: %s") % board['maxsize']
-
- # check if file is supported
- for filetype in board['filetypes']:
- if content_type == filetype['mime']:
- used_filetype = filetype
- break
-
- if not used_filetype:
- raise UserError, _("File type not supported.")
-
- # check if file is already posted
- is_duplicate = checkFileDuplicate(data)
- if checkFileDuplicate(data)[0]:
- raise UserError, _("This image has already been posted %s.") % ('<a href="' + Settings.BOARDS_URL + board['dir'] + '/res/' + str(is_duplicate[1]) + '.html#' + str(is_duplicate[2]) + '">' + _("here") + '</a>')
-
- # prepare file names
- if used_filetype['preserve_name'] == '1':
- file_base = os.path.splitext(originalname)[0] # use original filename
- else:
- file_base = '%d' % int(t * 1000) # generate timestamp name
- file_name = file_base + "." + used_filetype['ext']
- file_thumb_name = file_base + "s.jpg"
-
- # prepare paths
- file_path = Settings.IMAGES_DIR + board["dir"] + "/src/" + file_name
- file_thumb_path = Settings.IMAGES_DIR + board["dir"] + "/thumb/" + file_thumb_name
- file_mobile_path = Settings.IMAGES_DIR + board["dir"] + "/mobile/" + file_thumb_name
- file_cat_path = Settings.IMAGES_DIR + board["dir"] + "/cat/" + file_thumb_name
-
- # remove EXIF data if necessary for privacy
- if content_type == 'image/jpeg':
- data = removeExifData(data)
-
- # write file
- f = open(file_path, "wb")
- try:
- f.write(data)
- finally:
- f.close()
-
- # set maximum dimensions
- maxsize = int(board['thumb_px'])
-
- post["file"] = file_name
- post["image_width"] = width
- post["image_height"] = height
-
- # Do we need to thumbnail it?
- if not used_filetype['image']:
- # make thumbnail
- file_thumb_width, file_thumb_height = getThumbDimensions(width, height, maxsize)
-
- if used_filetype['ffmpeg_thumb'] == '1':
- # use ffmpeg to make thumbnail
- logTime("Generating thumbnail")
-
- if used_filetype['mime'][:5] == 'video':
- retcode = subprocess.call([
- Settings.FFMPEG_PATH, '-strict', '-2', '-ss', '0', '-i', file_path,
- '-v', 'quiet', '-an', '-vframes', '1', '-f', 'mjpeg', '-vf', 'scale=%d:%d' % (file_thumb_width, file_thumb_height),
- '-threads', '1', file_thumb_path])
- if spoiler:
- args = [Settings.CONVERT_PATH, file_thumb_path, "-limit", "thread", "1", "-background", "white", "-flatten", "-resize", "%dx%d" % (file_thumb_width, file_thumb_height), "-blur", "0x12", "-gravity", "center", "-fill", "rgba(0,0,0, .6)", "-draw", "rectangle 0,%d,%d,%d" % ((file_thumb_height/2)-10, file_thumb_width, (file_thumb_height/2)+7), "-fill", "white", "-annotate", "0", "Alerta de spoiler", "-quality", str(Settings.THUMB_QUALITY), file_thumb_path]
- retcode = subprocess.call(args)
- elif used_filetype['mime'][:5] == 'audio':
- # we do an exception and use png for audio waveform thumbnails since they
- # 1. are smaller 2. allow for transparency
- file_thumb_name = file_thumb_name[:-3] + "png"
- file_thumb_path = file_thumb_path[:-3] + "png"
- file_mobile_path = file_mobile_path[:-3] + "png"
- file_cat_path = file_cat_path[:-3] + "png"
-
- if int(board['thumb_px']) > 149:
- file_thumb_width = board['thumb_px']
- file_thumb_height = float(int(board['thumb_px'])/2)
- else:
- file_thumb_width = 150
- file_thumb_height = 75
- retcode = subprocess.call([
- Settings.FFMPEG_PATH, '-t', '300', '-i', file_path,
- '-filter_complex', 'showwavespic=s=%dx%d:split_channels=1' % (int(file_thumb_width), int(file_thumb_height)),
- '-frames:v', '1', '-threads', '1', file_thumb_path])
+def processImage(post, data, t, originalname, spoiler=False):
+ """
+ Take all post data from <post>, process uploaded file in <data>, and calculate
+ file names using datetime <t>
+ Returns updated <post> with file and thumb values
+ """
+ board = Settings._.BOARD
+
+ used_filetype = None
+
+ # get image information
+ content_type, width, height, size, extra = getImageInfo(data)
+
+ # check the size is fine
+ if size > int(board["maxsize"])*1024:
+ raise UserError, _("File too big. The maximum file size is: %s") % board['maxsize']
+
+ # check if file is supported
+ for filetype in board['filetypes']:
+ if content_type == filetype['mime']:
+ used_filetype = filetype
+ break
+
+ if not used_filetype:
+ raise UserError, _("File type not supported.")
+
+ # check if file is already posted
+ is_duplicate = checkFileDuplicate(data)
+    if is_duplicate[0]:
+ raise UserError, _("This image has already been posted %s.") % ('<a href="' + Settings.BOARDS_URL + board['dir'] + '/res/' + str(is_duplicate[1]) + '.html#' + str(is_duplicate[2]) + '">' + _("here") + '</a>')
+
+ # prepare file names
+ if used_filetype['preserve_name'] == '1':
+ file_base = os.path.splitext(originalname)[0] # use original filename
+ else:
+ file_base = '%d' % int(t * 1000) # generate timestamp name
+ file_name = file_base + "." + used_filetype['ext']
+ file_thumb_name = file_base + "s.jpg"
+
+ # prepare paths
+ file_path = Settings.IMAGES_DIR + board["dir"] + "/src/" + file_name
+ file_thumb_path = Settings.IMAGES_DIR + \
+ board["dir"] + "/thumb/" + file_thumb_name
+ file_mobile_path = Settings.IMAGES_DIR + \
+ board["dir"] + "/mobile/" + file_thumb_name
+ file_cat_path = Settings.IMAGES_DIR + \
+ board["dir"] + "/cat/" + file_thumb_name
+
+ # remove EXIF data if necessary for privacy
+ if content_type == 'image/jpeg':
+ data = removeExifData(data)
+
+ # write file
+ f = open(file_path, "wb")
+ try:
+ f.write(data)
+ finally:
+ f.close()
+
+ # set maximum dimensions
+ maxsize = int(board['thumb_px'])
+
+ post["file"] = file_name
+ post["image_width"] = width
+ post["image_height"] = height
+
+ # Do we need to thumbnail it?
+ if not used_filetype['image']:
+ # make thumbnail
+ file_thumb_width, file_thumb_height = getThumbDimensions(
+ width, height, maxsize)
+
+ if used_filetype['ffmpeg_thumb'] == '1':
+ # use ffmpeg to make thumbnail
+ logTime("Generating thumbnail")
+
+ if used_filetype['mime'][:5] == 'video':
+ retcode = subprocess.call([
+ Settings.FFMPEG_PATH, '-strict', '-2', '-ss', '0', '-i', file_path,
+ '-v', 'quiet', '-an', '-vframes', '1', '-f', 'mjpeg', '-vf', 'scale=%d:%d' % (
+ file_thumb_width, file_thumb_height),
+ '-threads', '1', file_thumb_path])
+ if spoiler:
+ args = [Settings.CONVERT_PATH, file_thumb_path, "-limit", "thread", "1", "-background", "white", "-flatten", "-resize", "%dx%d" % (file_thumb_width, file_thumb_height), "-blur", "0x12", "-gravity", "center", "-fill", "rgba(0,0,0, .6)", "-draw", "rectangle 0,%d,%d,%d" % (
+ (file_thumb_height/2)-10, file_thumb_width, (file_thumb_height/2)+7), "-fill", "white", "-annotate", "0", "Alerta de spoiler", "-quality", str(Settings.THUMB_QUALITY), file_thumb_path]
+ retcode = subprocess.call(args)
+ elif used_filetype['mime'][:5] == 'audio':
+                # we make an exception and use png for audio waveform thumbnails since they
+ # 1. are smaller 2. allow for transparency
+ file_thumb_name = file_thumb_name[:-3] + "png"
+ file_thumb_path = file_thumb_path[:-3] + "png"
+ file_mobile_path = file_mobile_path[:-3] + "png"
+ file_cat_path = file_cat_path[:-3] + "png"
+
+ if int(board['thumb_px']) > 149:
+ file_thumb_width = board['thumb_px']
+ file_thumb_height = float(int(board['thumb_px'])/2)
+ else:
+ file_thumb_width = 150
+ file_thumb_height = 75
+
+ retcode = subprocess.call([
+ Settings.FFMPEG_PATH, '-t', '300', '-i', file_path,
+ '-filter_complex', 'showwavespic=s=%dx%d:split_channels=1' % (
+ int(file_thumb_width), int(file_thumb_height)),
+ '-frames:v', '1', '-threads', '1', file_thumb_path])
# elif used_filetype['mime'] == 'application/x-shockwave-flash' or used_filetype['mime'] == 'mime/x-shockwave-flash':
# retcode = subprocess.call([
# './ffmpeg', '-i', file_path, '-vcodec', 'mjpeg', '-vframes', '1', '-an', '-f', 'rawvideo',
# '-vf', 'scale=%d:%d' % (file_thumb_width, file_thumb_height), '-threads', '1', file_thumb_path])
- if retcode != 0:
- os.remove(file_path)
- raise UserError, _("Thumbnail creation failure.") + ' ('+str(retcode)+')'
- else:
- # use imagemagick to make thumbnail
- args = [Settings.CONVERT_PATH, file_path, "-limit", "thread", "1", "-background", "white", "-flatten", "-resize", "%dx%d" % (file_thumb_width, file_thumb_height)]
- if spoiler:
- args += ["-blur", "0x12", "-gravity", "center", "-fill", "rgba(0,0,0, .6)", "-draw", "rectangle 0,%d,%d,%d" % ((file_thumb_height/2)-10, file_thumb_width, (file_thumb_height/2)+7), "-fill", "white", "-annotate", "0", "Alerta de spoiler"]
- args += ["-quality", str(Settings.THUMB_QUALITY), file_thumb_path]
-
- # generate thumbnails
- logTime("Generating thumbnail")
- retcode = subprocess.call(args)
- if retcode != 0:
- os.remove(file_path)
- raise UserError, _("Thumbnail creation failure.") + ' ('+str(retcode)+')'
-
- # check if thumbnail was truly created
- try:
- open(file_thumb_path)
- except:
- os.remove(file_path)
- raise UserError, _("Thumbnail creation failure.")
-
- # create extra thumbnails (catalog/mobile)
- subprocess.call([Settings.CONVERT_PATH, file_thumb_path, "-limit" , "thread", "1", "-resize", "100x100", "-quality", "75", file_mobile_path])
- if not post["parentid"]:
- subprocess.call([Settings.CONVERT_PATH, file_thumb_path, "-limit" , "thread", "1", "-resize", "150x150", "-quality", "60", file_cat_path])
-
- post["thumb"] = file_thumb_name
- post["thumb_width"] = file_thumb_width
- post["thumb_height"] = file_thumb_height
- else:
- # Don't thumbnail and use mime image
- if board["board_type"] == '0':
- post["thumb"] = used_filetype['image']
- post["thumb_width"] = '120'
- post["thumb_height"] = '120'
+ if retcode != 0:
+ os.remove(file_path)
+ raise UserError, _("Thumbnail creation failure.") + ' ('+str(retcode)+')'
+ else:
+ # use imagemagick to make thumbnail
+ args = [Settings.CONVERT_PATH, file_path, "-limit", "thread", "1", "-background",
+ "white", "-flatten", "-resize", "%dx%d" % (file_thumb_width, file_thumb_height)]
+ if spoiler:
+ args += ["-blur", "0x12", "-gravity", "center", "-fill", "rgba(0,0,0, .6)", "-draw", "rectangle 0,%d,%d,%d" % (
+ (file_thumb_height/2)-10, file_thumb_width, (file_thumb_height/2)+7), "-fill", "white", "-annotate", "0", "Alerta de spoiler"]
+ args += ["-quality", str(Settings.THUMB_QUALITY), file_thumb_path]
+
+ # generate thumbnails
+ logTime("Generating thumbnail")
+ retcode = subprocess.call(args)
+ if retcode != 0:
+ os.remove(file_path)
+ raise UserError, _("Thumbnail creation failure.") + ' ('+str(retcode)+')'
+
+ # check if thumbnail was truly created
+ try:
+ open(file_thumb_path)
+ except:
+ os.remove(file_path)
+ raise UserError, _("Thumbnail creation failure.")
+
+ # create extra thumbnails (catalog/mobile)
+ subprocess.call([Settings.CONVERT_PATH, file_thumb_path, "-limit", "thread",
+ "1", "-resize", "100x100", "-quality", "75", file_mobile_path])
+ if not post["parentid"]:
+ subprocess.call([Settings.CONVERT_PATH, file_thumb_path, "-limit",
+ "thread", "1", "-resize", "150x150", "-quality", "60", file_cat_path])
+
+ post["thumb"] = file_thumb_name
+ post["thumb_width"] = file_thumb_width
+ post["thumb_height"] = file_thumb_height
else:
- post["thumb"] = used_filetype['image'].split(".")[0] + '_small.png'
- post["thumb_width"] = '90'
- post["thumb_height"] = '90'
-
- # calculate size (bytes)
- post["file_size"] = len(data)
-
- # add additional metadata, if any
- post["message"] += extraInfo(content_type, file_name, file_path)
-
- # file md5
- post["file_hex"] = getMD5(data)
-
- return post
+ # Don't thumbnail and use mime image
+ if board["board_type"] == '0':
+ post["thumb"] = used_filetype['image']
+ post["thumb_width"] = '120'
+ post["thumb_height"] = '120'
+ else:
+ post["thumb"] = used_filetype['image'].split(".")[0] + '_small.png'
+ post["thumb_width"] = '90'
+ post["thumb_height"] = '90'
+
+ # calculate size (bytes)
+ post["file_size"] = len(data)
+
+ # add additional metadata, if any
+ post["message"] += extraInfo(content_type, file_name, file_path)
+
+ # file md5
+ post["file_hex"] = getMD5(data)
+
+ return post
+
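# A minimal standalone sketch of the single-frame ffmpeg thumbnailing pattern
# used above.  It assumes only that an `ffmpeg` binary is on PATH; the function
# name and arguments are illustrative, not part of weabot.
import subprocess

def video_thumbnail(src, dest, width, height):
    # -ss 0: start at the first frame; -vframes 1: emit exactly one frame;
    # -vf scale: resize to the precomputed thumbnail dimensions
    cmd = ["ffmpeg", "-ss", "0", "-i", src,
           "-an", "-vframes", "1", "-f", "mjpeg",
           "-vf", "scale=%d:%d" % (width, height),
           "-y", dest]
    return subprocess.call(cmd) == 0  # mirror the retcode checks above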
def extraInfo(mime, file_name, file_path):
- board = Settings._.BOARD
-
- if mime in ['audio/ogg', 'audio/opus', 'audio/mpeg', 'video/webm', 'video/mp4']:
- info = ffprobe_f(file_path)
- extra = {}
- credit_str = ""
-
- if mime == 'video/webm':
- for s in info['streams']:
- if 'width' in s:
- stream = s
- else:
- stream = info['streams'][0]
-
- extra['codec'] = stream.get('codec_name', '').encode('utf-8')
- format = info['format']
-
- if 'bit_rate' in format:
- extra['codec'] += ' ~%d kbps' % int(int(format['bit_rate']) / 1000)
- if 'tags' in format:
- extra['title'] = format['tags'].get('TITLE', format['tags'].get('title', '')).encode('utf-8')
- extra['artist'] = format['tags'].get('ARTIST', format['tags'].get('artist', '')).encode('utf-8')
- if extra['title'] or extra['artist']:
- credit_str = ' - '.join((extra['artist'], extra['title'])) + ' '
- if 'tags' in stream:
- extra['title'] = stream['tags'].get('TITLE', '').encode('utf-8')
- extra['artist'] = stream['tags'].get('ARTIST', '').encode('utf-8')
- if extra['title'] or extra['artist']:
- credit_str = ' - '.join((extra['artist'], extra['title'])) + ' '
-
- return '<hr /><small>%s(%s)</small>' % (credit_str, extra['codec'])
-
- elif mime in ['audio/mod', 'audio/xm', 'audio/s3m']:
- ext = mime.split('/')[1].upper()
- url = '/cgi/play/%s/%s' % (board['dir'], file_name)
- return '<hr /><small>Módulo tracker (%s) [<a href="%s" target="_blank">Click para escuchar</a>]</small>' % (ext, url)
-
- return ''
-
-def getImageInfo(data):
- data = str(data)
- size = len(data)
- height = -1
- width = -1
- extra = {}
- content_type = ""
-
- # handle GIFs
- if (size >= 10) and data[:6] in ("GIF87a", "GIF89a"):
- # Check to see if content_type is correct
- content_type = "image/gif"
- w, h = struct.unpack("<HH", data[6:10])
- width = int(w)
- height = int(h)
-
- # See PNG 2. Edition spec (http://www.w3.org/TR/PNG/)
- # Bytes 0-7 are below, 4-byte chunk length, then 'IHDR'
- # and finally the 4-byte width, height
- elif ((size >= 24) and data.startswith("\211PNG\r\n\032\n")
- and (data[12:16] == "IHDR")):
- content_type = "image/png"
- w, h = struct.unpack(">LL", data[16:24])
- width = int(w)
- height = int(h)
-
- # Maybe this is for an older PNG version.
- elif (size >= 16) and data.startswith("\211PNG\r\n\032\n"):
- # Check to see if we have the right content type
- content_type = "image/png"
- w, h = struct.unpack(">LL", data[8:16])
- width = int(w)
- height = int(h)
-
- # handle JPEGs
- elif (size >= 2) and data.startswith("\377\330"):
- content_type = "image/jpeg"
- jpeg = StringIO(data)
- jpeg.read(2)
- b = jpeg.read(1)
- try:
- while (b and ord(b) != 0xDA):
- while (ord(b) != 0xFF): b = jpeg.read
- while (ord(b) == 0xFF): b = jpeg.read(1)
- if (ord(b) >= 0xC0 and ord(b) <= 0xC3):
- jpeg.read(3)
- h, w = struct.unpack(">HH", jpeg.read(4))
- break
+ board = Settings._.BOARD
+
+ if mime in ['audio/ogg', 'audio/opus', 'audio/mpeg', 'video/webm', 'video/mp4']:
+ info = ffprobe_f(file_path)
+ extra = {}
+ credit_str = ""
+
+ if mime == 'video/webm':
+ for s in info['streams']:
+ if 'width' in s:
+ stream = s
else:
- jpeg.read(int(struct.unpack(">H", jpeg.read(2))[0])-2)
+ stream = info['streams'][0]
+
+ extra['codec'] = stream.get('codec_name', '').encode('utf-8')
+ format = info['format']
+
+ if 'bit_rate' in format:
+ extra['codec'] += ' ~%d kbps' % int(int(format['bit_rate']) / 1000)
+ if 'tags' in format:
+ extra['title'] = format['tags'].get(
+ 'TITLE', format['tags'].get('title', '')).encode('utf-8')
+ extra['artist'] = format['tags'].get(
+ 'ARTIST', format['tags'].get('artist', '')).encode('utf-8')
+ if extra['title'] or extra['artist']:
+ credit_str = ' - '.join((extra['artist'],
+ extra['title'])) + ' '
+ if 'tags' in stream:
+ extra['title'] = stream['tags'].get('TITLE', '').encode('utf-8')
+ extra['artist'] = stream['tags'].get('ARTIST', '').encode('utf-8')
+ if extra['title'] or extra['artist']:
+ credit_str = ' - '.join((extra['artist'],
+ extra['title'])) + ' '
+
+ return '<hr /><small>%s(%s)</small>' % (credit_str, extra['codec'])
+
+ elif mime in ['audio/mod', 'audio/xm', 'audio/s3m']:
+ ext = mime.split('/')[1].upper()
+ url = '/cgi/play/%s/%s' % (board['dir'], file_name)
+ return '<hr /><small>Módulo tracker (%s) [<a href="%s" target="_blank">Click para escuchar</a>]</small>' % (ext, url)
+
+ return ''
+
+
+def getImageInfo(data):
+ data = str(data)
+ size = len(data)
+ height = -1
+ width = -1
+ extra = {}
+ content_type = ""
+
+ # handle GIFs
+ if (size >= 10) and data[:6] in ("GIF87a", "GIF89a"):
+ # Check to see if content_type is correct
+ content_type = "image/gif"
+ w, h = struct.unpack("<HH", data[6:10])
+ width = int(w)
+ height = int(h)
+
+ # See PNG 2. Edition spec (http://www.w3.org/TR/PNG/)
+ # Bytes 0-7 are below, 4-byte chunk length, then 'IHDR'
+ # and finally the 4-byte width, height
+ elif ((size >= 24) and data.startswith("\211PNG\r\n\032\n")
+ and (data[12:16] == "IHDR")):
+ content_type = "image/png"
+ w, h = struct.unpack(">LL", data[16:24])
+ width = int(w)
+ height = int(h)
+
+ # Maybe this is for an older PNG version.
+ elif (size >= 16) and data.startswith("\211PNG\r\n\032\n"):
+ # Check to see if we have the right content type
+ content_type = "image/png"
+ w, h = struct.unpack(">LL", data[8:16])
+ width = int(w)
+ height = int(h)
+
+ # handle JPEGs
+ elif (size >= 2) and data.startswith("\377\330"):
+ content_type = "image/jpeg"
+ jpeg = StringIO(data)
+ jpeg.read(2)
b = jpeg.read(1)
- width = int(w)
- height = int(h)
- except struct.error:
- pass
- except ValueError:
- pass
-
- # handle WebM
- elif (size >= 4) and data.startswith("\x1A\x45\xDF\xA3"):
- content_type = "video/webm"
- info = ffprobe(data)
-
- # handle mp4
- elif (size >= 8) and data[4:12] in ["ftypmp42", "ftypisom"]:
- content_type = "video/mp4"
-
- # handle ogg formats (vorbis/opus)
- elif (size >= 64) and data[:4] == "OggS":
- if data[28:35] == "\x01vorbis":
- content_type = "audio/ogg"
- elif data[28:36] == "OpusHead":
- content_type = "audio/opus"
-
- # handle MP3
- elif (size >= 64) and (data[:3] == "ID3" or data[:3] == "\xFF\xFB"):
- content_type = "audio/mpeg"
-
- # handle MOD
- elif (size >= 64) and data[1080:1084] == "M.K.":
- content_type = "audio/mod"
-
- # handle XM
- elif (size >= 64) and data.startswith("Extended Module:"):
- content_type = "audio/xm"
-
- # handle S3M
- elif (size >= 64) and data[25:32] == "\x00\x00\x00\x1A\x10\x00\x00":
- content_type = "audio/s3m"
-
- # handle PDF
- elif (size >= 4) and data[:7] == "%PDF-1.":
- content_type = "application/pdf"
-
- # handle Shockwave Flash
- elif (size >= 3) and data[:3] in ["CWS", "FWS"]:
- content_type = "application/x-shockwave-flash"
-
- # handle torrent
- elif (size >= 11) and data[:11] == "d8:announce":
- content_type = "application/x-bittorrent"
-
- # handle PDF
- elif (size >= 2) and data[:2] == "PK":
- content_type = "application/epub+zip"
-
- if content_type.startswith("video"):
- info = ffprobe(data)
- for stream in info['streams']:
- if 'width' in stream:
- width = stream['width']
- height = stream['height']
- break
-
-
- return content_type, width, height, size, extra
+ try:
+ while (b and ord(b) != 0xDA):
+ while (ord(b) != 0xFF):
+                b = jpeg.read(1)
+ while (ord(b) == 0xFF):
+ b = jpeg.read(1)
+ if (ord(b) >= 0xC0 and ord(b) <= 0xC3):
+ jpeg.read(3)
+ h, w = struct.unpack(">HH", jpeg.read(4))
+ break
+ else:
+ jpeg.read(int(struct.unpack(">H", jpeg.read(2))[0])-2)
+ b = jpeg.read(1)
+ width = int(w)
+ height = int(h)
+ except struct.error:
+ pass
+ except ValueError:
+ pass
+
+ # handle WebM
+ elif (size >= 4) and data.startswith("\x1A\x45\xDF\xA3"):
+ content_type = "video/webm"
+ info = ffprobe(data)
+
+ # handle mp4
+ elif (size >= 8) and data[4:12] in ["ftypmp42", "ftypisom"]:
+ content_type = "video/mp4"
+
+ # handle ogg formats (vorbis/opus)
+ elif (size >= 64) and data[:4] == "OggS":
+ if data[28:35] == "\x01vorbis":
+ content_type = "audio/ogg"
+ elif data[28:36] == "OpusHead":
+ content_type = "audio/opus"
+
+ # handle MP3
+ elif (size >= 64) and (data[:3] == "ID3" or data[:3] == "\xFF\xFB"):
+ content_type = "audio/mpeg"
+
+ # handle MOD
+ elif (size >= 64) and data[1080:1084] == "M.K.":
+ content_type = "audio/mod"
+
+ # handle XM
+ elif (size >= 64) and data.startswith("Extended Module:"):
+ content_type = "audio/xm"
+
+ # handle S3M
+ elif (size >= 64) and data[25:32] == "\x00\x00\x00\x1A\x10\x00\x00":
+ content_type = "audio/s3m"
+
+ # handle PDF
+ elif (size >= 4) and data[:7] == "%PDF-1.":
+ content_type = "application/pdf"
+
+ # handle Shockwave Flash
+ elif (size >= 3) and data[:3] in ["CWS", "FWS"]:
+ content_type = "application/x-shockwave-flash"
+
+ # handle torrent
+ elif (size >= 11) and data[:11] == "d8:announce":
+ content_type = "application/x-bittorrent"
+
+    # handle EPUB (zip container)
+ elif (size >= 2) and data[:2] == "PK":
+ content_type = "application/epub+zip"
+
+ if content_type.startswith("video"):
+ info = ffprobe(data)
+ for stream in info['streams']:
+ if 'width' in stream:
+ width = stream['width']
+ height = stream['height']
+ break
+
+ return content_type, width, height, size, extra
+
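# Worked example of the PNG branch in getImageInfo above: after the 8-byte
# signature and the 4-byte IHDR length, "IHDR" sits at offsets 12-15 and the
# width/height pair are big-endian 32-bit integers at offsets 16-23.
import struct

png_head = (b"\x89PNG\r\n\x1a\n"             # signature, same bytes as "\211PNG\r\n\032\n"
            + struct.pack(">L", 13)          # IHDR chunk length
            + b"IHDR"
            + struct.pack(">LL", 640, 480))  # width, height
w, h = struct.unpack(">LL", png_head[16:24])
assert (w, h) == (640, 480)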
def ffprobe(data):
- import json
- p = subprocess.Popen([Settings.FFPROBE_PATH, '-v', 'quiet', '-print_format', 'json', '-show_format', '-show_streams', '-'],
- stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
-
- out = p.communicate(input=data)[0]
- return json.loads(out)
-
+ import json
+ p = subprocess.Popen([Settings.FFPROBE_PATH, '-v', 'quiet', '-print_format', 'json', '-show_format', '-show_streams', '-'],
+ stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ out = p.communicate(input=data)[0]
+ return json.loads(out)
+
+
def ffprobe_f(filename):
- import json
-
- p = subprocess.Popen([Settings.FFPROBE_PATH, '-v', 'quiet', '-print_format', 'json', '-show_format', '-show_streams', filename],
- stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
-
- out = p.communicate()[0]
- return json.loads(out)
+ import json
+
+ p = subprocess.Popen([Settings.FFPROBE_PATH, '-v', 'quiet', '-print_format', 'json', '-show_format', '-show_streams', filename],
+ stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ out = p.communicate()[0]
+ return json.loads(out)
+
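# Both ffprobe helpers above just parse `ffprobe -print_format json` output,
# and the callers walk info['streams'] for the first entry carrying a width.
# A self-contained illustration on a canned payload (the payload shape is
# assumed from the usage above, not a complete ffprobe schema):
import json

sample = '''{"streams": [{"codec_type": "audio", "codec_name": "opus"},
                         {"codec_type": "video", "codec_name": "vp9",
                          "width": 1280, "height": 720}],
             "format": {"bit_rate": "1500000"}}'''
info = json.loads(sample)
video = next(s for s in info['streams'] if 'width' in s)
print("%sx%s" % (video['width'], video['height']))            # 1280x720
print("~%d kbps" % (int(info['format']['bit_rate']) / 1000))  # ~1500 kbps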
def getThumbDimensions(width, height, maxsize):
- """
- Calculate dimensions to use for a thumbnail with maximum width/height of
- <maxsize>, keeping aspect ratio
- """
- wratio = (float(maxsize) / float(width))
- hratio = (float(maxsize) / float(height))
-
- if (width <= maxsize) and (height <= maxsize):
- return width, height
- else:
- if (wratio * height) < maxsize:
- thumb_height = math.ceil(wratio * height)
- thumb_width = maxsize
+ """
+ Calculate dimensions to use for a thumbnail with maximum width/height of
+ <maxsize>, keeping aspect ratio
+ """
+ wratio = (float(maxsize) / float(width))
+ hratio = (float(maxsize) / float(height))
+
+ if (width <= maxsize) and (height <= maxsize):
+ return width, height
else:
- thumb_width = math.ceil(hratio * width)
- thumb_height = maxsize
-
- return int(thumb_width), int(thumb_height)
+ if (wratio * height) < maxsize:
+ thumb_height = math.ceil(wratio * height)
+ thumb_width = maxsize
+ else:
+ thumb_width = math.ceil(hratio * width)
+ thumb_height = maxsize
+
+ return int(thumb_width), int(thumb_height)
+
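# Worked example of the aspect-ratio arithmetic above: a 1920x1080 source with
# maxsize 250 is limited by its width, so the height scales by the same ratio.
import math

maxsize, width, height = 250, 1920, 1080
wratio = float(maxsize) / width         # ~0.1302
assert wratio * height < maxsize        # width is the binding constraint
thumb = (maxsize, int(math.ceil(wratio * height)))
assert thumb == (250, 141)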
def checkFileDuplicate(data):
- """
- Check that the file <data> does not already exist in a live post on the
- current board by calculating its hex and checking it against the database
- """
- board = Settings._.BOARD
-
- file_hex = getMD5(data)
- post = FetchOne("SELECT `id`, `parentid` FROM `posts` WHERE `file_hex` = '%s' AND `boardid` = %s AND IS_DELETED = 0 LIMIT 1" % (file_hex, board['id']))
- if post:
- if int(post["parentid"]) != 0:
- return True, post["parentid"], post["id"]
+ """
+ Check that the file <data> does not already exist in a live post on the
+ current board by calculating its hex and checking it against the database
+ """
+ board = Settings._.BOARD
+
+ file_hex = getMD5(data)
+ post = FetchOne("SELECT `id`, `parentid` FROM `posts` WHERE `file_hex` = '%s' AND `boardid` = %s AND IS_DELETED = 0 LIMIT 1" % (
+ file_hex, board['id']))
+ if post:
+ if int(post["parentid"]) != 0:
+ return True, post["parentid"], post["id"]
+ else:
+ return True, post["id"], post["id"]
else:
- return True, post["id"], post["id"]
- else:
- return False, 0, 0
+ return False, 0, 0
+
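# The duplicate check above keys on an MD5 digest of the raw upload.  getMD5 is
# defined elsewhere in the codebase; presumably it boils down to a hex digest
# like this sketch (the hex form is also what keeps the interpolated SQL above
# safe, since hexdigest() only ever returns [0-9a-f] characters):
import hashlib

def md5_hex(data):
    return hashlib.md5(data).hexdigest()  # 32-character hex string (file_hex)

print(md5_hex(b"example upload bytes"))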
def getJpegSegments(data):
- if data[0:2] != b"\xff\xd8":
- raise UserError("Given data isn't JPEG.")
-
- head = 2
- segments = [b"\xff\xd8"]
- while 1:
- if data[head: head + 2] == b"\xff\xda":
- yield data[head:]
- break
- else:
- length = struct.unpack(">H", data[head + 2: head + 4])[0]
- endPoint = head + length + 2
- seg = data[head: endPoint]
- yield seg
- head = endPoint
-
- if (head >= len(data)):
- raise UserDataError("Wrong JPEG data.")
-
+ if data[0:2] != b"\xff\xd8":
+ raise UserError("Given data isn't JPEG.")
+
+ head = 2
+ segments = [b"\xff\xd8"]
+ while 1:
+ if data[head: head + 2] == b"\xff\xda":
+ yield data[head:]
+ break
+ else:
+ length = struct.unpack(">H", data[head + 2: head + 4])[0]
+ endPoint = head + length + 2
+ seg = data[head: endPoint]
+ yield seg
+ head = endPoint
+
+ if (head >= len(data)):
+ raise UserDataError("Wrong JPEG data.")
+
+
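# Toy version of the marker walk in getJpegSegments above: after the SOI marker
# (FF D8), each segment is a 2-byte marker plus a big-endian length that counts
# the length field itself, so the next segment starts at marker + 2 + length.
import struct

def jpeg_segments(data):
    head = 2                                    # skip SOI
    while head < len(data):
        if data[head:head + 2] == b"\xff\xda":  # SOS: the rest is scan data
            yield data[head:]
            return
        length = struct.unpack(">H", data[head + 2:head + 4])[0]
        yield data[head:head + 2 + length]
        head += 2 + length

# SOI, one 8-byte APP1 "Exif" segment, then SOS: yields the APP1 segment first
app1 = b"\xff\xe1" + struct.pack(">H", 8) + b"Exif\x00\x00"
toy = b"\xff\xd8" + app1 + b"\xff\xda\x00\x00"
assert [s[:2] for s in jpeg_segments(toy)] == [b"\xff\xe1", b"\xff\xda"]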
def removeExifData(src_data):
- exif = None
-
- for seg in getJpegSegments(src_data):
- if seg[0:2] == b"\xff\xe1" and seg[4:10] == b"Exif\x00\x00":
- exif = seg
- break
-
- if exif:
- return src_data.replace(exif, b"")
- else:
- return src_data
+ exif = None
+
+ for seg in getJpegSegments(src_data):
+ if seg[0:2] == b"\xff\xe1" and seg[4:10] == b"Exif\x00\x00":
+ exif = seg
+ break
+
+ if exif:
+ return src_data.replace(exif, b"")
+ else:
+ return src_data
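# removeExifData above drops the APP1 Exif segment with bytes.replace(), which
# would also remove any coincidentally identical byte run elsewhere in the
# file.  A hedged alternative sketch that splices by offset instead, under the
# same segment-layout assumptions as getJpegSegments:
import struct

def strip_exif(src_data):
    head = 2                                       # skip SOI
    while head + 4 <= len(src_data):
        marker = src_data[head:head + 2]
        if marker == b"\xff\xda":                  # scan data: nothing left to strip
            break
        length = struct.unpack(">H", src_data[head + 2:head + 4])[0]
        end = head + 2 + length
        if marker == b"\xff\xe1" and src_data[head + 4:head + 10] == b"Exif\x00\x00":
            return src_data[:head] + src_data[end:]   # cut just this segment
        head = end
    return src_data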
diff --git a/cgi/manage.py b/cgi/manage.py
index c4581be..efcee8f 100644
--- a/cgi/manage.py
+++ b/cgi/manage.py
@@ -14,1920 +14,2172 @@ from formatting import *
from template import *
from post import *
+
def manage(self, path_split):
- page = ''
- validated = False
- administrator = False
- moderator = True
- skiptemplate = False
- staff_account = None
-
- if 'username' in self.formdata and 'password' in self.formdata:
- # If no admin accounts available, create admin:admin
- first_admin = FetchOne("SELECT 1 FROM `staff` WHERE `rights` = 0 LIMIT 1", 0)
- if not first_admin:
- InsertDb("INSERT INTO `staff` (`username`, `password`, `added`, `rights`) VALUES ('admin', '" + _mysql.escape_string(genPasswdHash("admin")) + "', 0, 0)")
-
- staff_account = verifyPasswd(self.formdata['username'], self.formdata['password'])
+ page = ''
+ validated = False
+ administrator = False
+ moderator = True
+ skiptemplate = False
+ staff_account = None
+
+ if 'username' in self.formdata and 'password' in self.formdata:
+ # If no admin accounts available, create admin:admin
+ first_admin = FetchOne(
+ "SELECT 1 FROM `staff` WHERE `rights` = 0 LIMIT 1", 0)
+ if not first_admin:
+ InsertDb("INSERT INTO `staff` (`username`, `password`, `added`, `rights`) VALUES ('admin', '" +
+ _mysql.escape_string(genPasswdHash("admin")) + "', 0, 0)")
+
+ staff_account = verifyPasswd(
+ self.formdata['username'], self.formdata['password'])
+ if staff_account:
+ session_uuid = newSession(staff_account['id'])
+ setCookie(self, 'weabot_manage', session_uuid)
+ UpdateDb('DELETE FROM `logs` WHERE `timestamp` < ' +
+ str(timestamp() - 604800)) # one week
+ else:
+ page += _('Incorrect username/password.')
+ logAction('', 'Failed log-in. U:'+_mysql.escape_string(
+ self.formdata['username'])+' IP:'+self.environ["REMOTE_ADDR"])
+ else:
+ # Validate existing session
+ manage_cookie = getCookie(self, 'weabot_manage')
+ if manage_cookie:
+ staff_account = validateSession(manage_cookie)
+ if not staff_account:
+ page += "La sesión ha expirado. Por favor ingresa tus credenciales nuevamente."
+ deleteCookie(self, 'weabot_manage')
+
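# genPasswdHash and verifyPasswd are defined elsewhere in the codebase; purely
# as a hedged illustration of the general shape (a salted digest plus a
# constant-time comparison), in Python 2.7 style to match the code above --
# this is not weabot's actual scheme:
import binascii
import hashlib
import hmac
import os

def hash_password(password, salt=None):
    if salt is None:
        salt = binascii.hexlify(os.urandom(16))
    return salt + '$' + hashlib.sha256(salt + password).hexdigest()

def verify_password(password, stored):
    salt = stored.split('$', 1)[0]
    return hmac.compare_digest(hash_password(password, salt), stored)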
if staff_account:
- session_uuid = newSession(staff_account['id'])
- setCookie(self, 'weabot_manage', session_uuid)
- UpdateDb('DELETE FROM `logs` WHERE `timestamp` < ' + str(timestamp() - 604800)) # one week
+ validated = True
+ if 'session_id' in staff_account:
+ renewSession(staff_account['session_id'])
+
+ if staff_account['rights'] in ['0', '1', '2']:
+ administrator = True
+ if staff_account['rights'] == '2':
+ moderator = False
+ UpdateDb('UPDATE `staff` SET `lastactive` = ' + str(timestamp()
+ ) + ' WHERE `id` = ' + staff_account['id'] + ' LIMIT 1')
+
+ if not validated:
+ template_filename = "login.html"
+ template_values = {}
else:
- page += _('Incorrect username/password.')
- logAction('', 'Failed log-in. U:'+_mysql.escape_string(self.formdata['username'])+' IP:'+self.environ["REMOTE_ADDR"])
- else:
- # Validate existing session
- manage_cookie = getCookie(self, 'weabot_manage')
- if manage_cookie:
- staff_account = validateSession(manage_cookie)
- if not staff_account:
- page += "La sesión ha expirado. Por favor ingresa tus credenciales nuevamente."
- deleteCookie(self, 'weabot_manage')
-
- if staff_account:
- validated = True
- if 'session_id' in staff_account:
- renewSession(staff_account['session_id'])
-
- if staff_account['rights'] in ['0', '1', '2']:
- administrator = True
- if staff_account['rights'] == '2':
- moderator = False
- UpdateDb('UPDATE `staff` SET `lastactive` = ' + str(timestamp()) + ' WHERE `id` = ' + staff_account['id'] + ' LIMIT 1')
-
- if not validated:
- template_filename = "login.html"
- template_values = {}
- else:
- if len(path_split) > 2:
- if path_split[2] == 'rebuild':
- if not administrator:
- return
-
- try:
- board_dir = path_split[3]
- except:
- board_dir = ''
-
- if board_dir == '':
- template_filename = "rebuild.html"
- template_values = {'boards': boardlist()}
- else:
- everything = ("everything" in self.formdata)
- if board_dir == '!ALL':
- t1 = time.time()
- boards = FetchAll('SELECT `dir` FROM `boards` WHERE secret = 0')
- for board in boards:
- board = setBoard(board['dir'])
- regenerateBoard(everything)
-
- message = _('Rebuilt %(board)s in %(time)s seconds.') % {'board': _('all boards'), 'time': timeTaken(t1, time.time())}
- logAction(staff_account['username'], _('Rebuilt %s') % _('all boards'))
- elif board_dir == '!BBS':
- t1 = time.time()
- boards = FetchAll('SELECT `dir` FROM `boards` WHERE `board_type` = 1')
- for board in boards:
- board = setBoard(board['dir'])
- regenerateBoard(everything)
-
- message = _('Rebuilt %(board)s in %(time)s seconds.') % {'board': _('all boards'), 'time': timeTaken(t1, time.time())}
- logAction(staff_account['username'], _('Rebuilt %s') % _('all boards'))
- elif board_dir == '!IB':
- t1 = time.time()
- boards = FetchAll('SELECT `dir` FROM `boards` WHERE `board_type` = 1')
- for board in boards:
- board = setBoard(board['dir'])
- regenerateBoard(everything)
-
- message = _('Rebuilt %(board)s in %(time)s seconds.') % {'board': _('all boards'), 'time': timeTaken(t1, time.time())}
- logAction(staff_account['username'], _('Rebuilt %s') % _('all boards'))
- elif board_dir == '!HOME':
- t1 = time.time()
- regenerateHome()
- message = _('Rebuilt %(board)s in %(time)s seconds.') % {'board': _('home'), 'time': timeTaken(t1, time.time())}
- logAction(staff_account['username'], _('Rebuilt %s') % _('home'))
- elif board_dir == '!NEWS':
- t1 = time.time()
- regenerateNews()
- message = _('Rebuilt %(board)s in %(time)s seconds.') % {'board': _('news'), 'time': timeTaken(t1, time.time())}
- logAction(staff_account['username'], _('Rebuilt %s') % _('news'))
- elif board_dir == '!KAKO':
- t1 = time.time()
- boards = FetchAll('SELECT `dir` FROM `boards` WHERE archive = 1')
- for board in boards:
- board = setBoard(board['dir'])
- regenerateKako()
-
- message = _('Rebuilt %(board)s in %(time)s seconds.') % {'board': 'kako', 'time': timeTaken(t1, time.time())}
- logAction(staff_account['username'], _('Rebuilt %s') % 'kako')
- elif board_dir == '!HTACCESS':
- t1 = time.time()
- if regenerateAccess():
- message = _('Rebuilt %(board)s in %(time)s seconds.') % {'board': _('htaccess'), 'time': timeTaken(t1, time.time())}
- logAction(staff_account['username'], _('Rebuilt %s') % _('htaccess'))
- else:
- message = _('htaccess regeneration deactivated by sysop.')
- else:
- t1 = time.time()
- board = setBoard(board_dir)
- regenerateBoard(everything)
-
- message = _('Rebuilt %(board)s in %(time)s seconds.') % {'board': '/' + board['dir'] + '/', 'time': timeTaken(t1, time.time())}
- logAction(staff_account['username'], 'Rebuilt /' + board['dir'] + '/')
-
- template_filename = "message.html"
- elif path_split[2] == 'mod':
- if not moderator:
- return
+ if len(path_split) > 2:
+ if path_split[2] == 'rebuild':
+ if not administrator:
+ return
+
+ try:
+ board_dir = path_split[3]
+ except:
+ board_dir = ''
+
+ if board_dir == '':
+ template_filename = "rebuild.html"
+ template_values = {'boards': boardlist()}
+ else:
+ everything = ("everything" in self.formdata)
+ if board_dir == '!ALL':
+ t1 = time.time()
+ boards = FetchAll(
+ 'SELECT `dir` FROM `boards` WHERE secret = 0')
+ for board in boards:
+ board = setBoard(board['dir'])
+ regenerateBoard(everything)
+
+ message = _('Rebuilt %(board)s in %(time)s seconds.') % {
+ 'board': _('all boards'), 'time': timeTaken(t1, time.time())}
+ logAction(staff_account['username'], _(
+ 'Rebuilt %s') % _('all boards'))
+ elif board_dir == '!BBS':
+ t1 = time.time()
+ boards = FetchAll(
+ 'SELECT `dir` FROM `boards` WHERE `board_type` = 1')
+ for board in boards:
+ board = setBoard(board['dir'])
+ regenerateBoard(everything)
+
+ message = _('Rebuilt %(board)s in %(time)s seconds.') % {
+ 'board': _('all boards'), 'time': timeTaken(t1, time.time())}
+ logAction(staff_account['username'], _(
+ 'Rebuilt %s') % _('all boards'))
+ elif board_dir == '!IB':
+ t1 = time.time()
+ boards = FetchAll(
+ 'SELECT `dir` FROM `boards` WHERE `board_type` = 1')
+ for board in boards:
+ board = setBoard(board['dir'])
+ regenerateBoard(everything)
+
+ message = _('Rebuilt %(board)s in %(time)s seconds.') % {
+ 'board': _('all boards'), 'time': timeTaken(t1, time.time())}
+ logAction(staff_account['username'], _(
+ 'Rebuilt %s') % _('all boards'))
+ elif board_dir == '!HOME':
+ t1 = time.time()
+ regenerateHome()
+ message = _('Rebuilt %(board)s in %(time)s seconds.') % {
+ 'board': _('home'), 'time': timeTaken(t1, time.time())}
+ logAction(staff_account['username'], _(
+ 'Rebuilt %s') % _('home'))
+ elif board_dir == '!NEWS':
+ t1 = time.time()
+ regenerateNews()
+ message = _('Rebuilt %(board)s in %(time)s seconds.') % {
+ 'board': _('news'), 'time': timeTaken(t1, time.time())}
+ logAction(staff_account['username'], _(
+ 'Rebuilt %s') % _('news'))
+ elif board_dir == '!KAKO':
+ t1 = time.time()
+ boards = FetchAll(
+ 'SELECT `dir` FROM `boards` WHERE archive = 1')
+ for board in boards:
+ board = setBoard(board['dir'])
+ regenerateKako()
+
+ message = _('Rebuilt %(board)s in %(time)s seconds.') % {
+ 'board': 'kako', 'time': timeTaken(t1, time.time())}
+ logAction(staff_account['username'], _(
+ 'Rebuilt %s') % 'kako')
+ elif board_dir == '!HTACCESS':
+ t1 = time.time()
+ if regenerateAccess():
+ message = _('Rebuilt %(board)s in %(time)s seconds.') % {
+ 'board': _('htaccess'), 'time': timeTaken(t1, time.time())}
+ logAction(staff_account['username'], _(
+ 'Rebuilt %s') % _('htaccess'))
+ else:
+ message = _(
+ 'htaccess regeneration deactivated by sysop.')
+ else:
+ t1 = time.time()
+ board = setBoard(board_dir)
+ regenerateBoard(everything)
+
+ message = _('Rebuilt %(board)s in %(time)s seconds.') % {
+ 'board': '/' + board['dir'] + '/', 'time': timeTaken(t1, time.time())}
+ logAction(staff_account['username'],
+ 'Rebuilt /' + board['dir'] + '/')
+
+ template_filename = "message.html"
+ elif path_split[2] == 'mod':
+ if not moderator:
+ return
+
+ try:
+ board = setBoard(path_split[3])
+ except:
+ board = ""
+
+ if not board:
+ template_filename = "mod.html"
+ template_values = {"mode": 1, 'boards': boardlist()}
+ elif self.formdata.get("thread"):
+ parentid = int(self.formdata["thread"])
+ posts = FetchAll('SELECT id, timestamp, timestamp_formatted, name, message, file, thumb, IS_DELETED, locked, subject, length, INET_NTOA(ip) AS ip FROM `posts` WHERE (parentid = %d OR id = %d) AND boardid = %s ORDER BY `id` ASC' % (
+ parentid, parentid, board['id']))
+ template_filename = "mod.html"
+ template_values = {"mode": 3,
+ "dir": board["dir"], "posts": posts}
+ else:
+ threads = FetchAll(
+ "SELECT * FROM `posts` WHERE boardid = %s AND parentid = 0 ORDER BY `bumped` DESC" % board["id"])
+ template_filename = "mod.html"
+ template_values = {"mode": 2,
+ "dir": board["dir"], "threads": threads}
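# The moderation queries above are built with % interpolation guarded by int()
# casts and _mysql.escape_string().  For comparison only -- FetchAll/FetchOne in
# this codebase take prebuilt SQL strings -- the same thread lookup written with
# MySQLdb's own parameter binding would look roughly like:
import MySQLdb

def fetch_thread(conn, board_id, parentid):
    cur = conn.cursor()
    cur.execute("SELECT id, name, message FROM posts "
                "WHERE (parentid = %s OR id = %s) AND boardid = %s "
                "ORDER BY id ASC",
                (parentid, parentid, board_id))
    return cur.fetchall()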
+ elif path_split[2] == "recent":
+ posts = FetchAll("SELECT posts.id, posts.subject, dir, boards.board_type, parentid, file, thumb, timestamp_formatted, timestamp, posts.message, INET_NTOA(ip) AS ip, posts.name, email, tripcode, boards.name AS board_name FROM posts INNER JOIN boards ON posts.boardid = boards.id WHERE posts.timestamp > UNIX_TIMESTAMP() - 86400 ORDER BY timestamp DESC")
+ template_filename = "recent.html"
+ template_values = {"posts": posts}
+ elif path_split[2] == 'staff':
+ if staff_account['rights'] != '0':
+ return
+ action_taken = False
+
+ if len(path_split) > 3:
+ if path_split[3] == 'add' or path_split[3] == 'edit':
+ member = None
+ member_username = ''
+ member_rights = '3'
+
+ if path_split[3] == 'edit':
+ if len(path_split) > 4:
+ member = FetchOne(
+ 'SELECT * FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
+ if member:
+ member_username = member['username']
+ member_rights = member['rights']
+ action = 'edit/' + member['id']
+
+ try:
+ if self.formdata.get('user'):
+ if self.formdata['rights'] in ['0', '1', '2', '3']:
+ action_taken = True
+
+ UpdateDb("UPDATE `staff` SET `username` = '" + _mysql.escape_string(
+ self.formdata['user']) + "', `rights` = " + self.formdata['rights'] + " WHERE `id` = " + member['id'] + " LIMIT 1")
+ message = _(
+ 'Staff member updated.')
+ logAction(staff_account['username'], _(
+ 'Updated staff account for %s') % self.formdata['user'])
+ template_filename = "message.html"
+ except:
+ pass
+ else:
+ action = 'add'
+ try:
+ if self.formdata.get('user') and self.formdata.get('pass'):
+ username_taken = FetchOne(
+ 'SELECT * FROM `staff` WHERE `username` = \'' + _mysql.escape_string(self.formdata['user']) + '\' LIMIT 1')
+ if not username_taken:
+ if self.formdata['rights'] in ['0', '1', '2', '3']:
+ action_taken = True
+ pass_hash = genPasswdHash(
+ self.formdata['pass'])
+
+ InsertDb("INSERT INTO `staff` (`username`, `password`, `added`, `rights`) VALUES ('" + _mysql.escape_string(
+ self.formdata['user']) + "', '" + _mysql.escape_string(pass_hash) + "', " + str(timestamp()) + ", " + self.formdata['rights'] + ")")
+ message = _('Staff member added.')
+ logAction(
+ staff_account['username'], 'Added staff account for ' + self.formdata['user'])
+
+ template_filename = "message.html"
+ else:
+ action_taken = True
+ message = _(
+ 'That username is already in use.')
+ template_filename = "message.html"
+ except:
+ pass
+
+ if not action_taken:
+ action_taken = True
+
+ if action == 'add':
+ submit = 'Agregar'
+ else:
+ submit = 'Editar'
+
+ template_filename = "staff.html"
+ template_values = {'mode': 1,
+ 'action': action,
+ 'member': member,
+ 'member_username': member_username,
+ 'member_rights': member_rights,
+ 'submit': submit}
+ elif path_split[3] == 'delete':
+ if not moderator:
+ return
- try:
- board = setBoard(path_split[3])
- except:
- board = ""
-
- if not board:
- template_filename = "mod.html"
- template_values = {"mode": 1, 'boards': boardlist()}
- elif self.formdata.get("thread"):
- parentid = int(self.formdata["thread"])
- posts = FetchAll('SELECT id, timestamp, timestamp_formatted, name, message, file, thumb, IS_DELETED, locked, subject, length, INET_NTOA(ip) AS ip FROM `posts` WHERE (parentid = %d OR id = %d) AND boardid = %s ORDER BY `id` ASC' % (parentid, parentid, board['id']))
- template_filename = "mod.html"
- template_values = {"mode": 3, "dir": board["dir"], "posts": posts}
- else:
- threads = FetchAll("SELECT * FROM `posts` WHERE boardid = %s AND parentid = 0 ORDER BY `bumped` DESC" % board["id"])
- template_filename = "mod.html"
- template_values = {"mode": 2, "dir": board["dir"], "threads": threads}
- elif path_split[2] == "recent":
- posts = FetchAll("SELECT posts.id, posts.subject, dir, boards.board_type, parentid, file, thumb, timestamp_formatted, timestamp, posts.message, INET_NTOA(ip) AS ip, posts.name, email, tripcode, boards.name AS board_name FROM posts INNER JOIN boards ON posts.boardid = boards.id WHERE posts.timestamp > UNIX_TIMESTAMP() - 86400 ORDER BY timestamp DESC")
- template_filename = "recent.html"
- template_values = {"posts": posts}
- elif path_split[2] == 'staff':
- if staff_account['rights'] != '0':
- return
- action_taken = False
-
- if len(path_split) > 3:
- if path_split[3] == 'add' or path_split[3] == 'edit':
- member = None
- member_username = ''
- member_rights = '3'
-
- if path_split[3] == 'edit':
- if len(path_split) > 4:
- member = FetchOne('SELECT * FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
- if member:
- member_username = member['username']
- member_rights = member['rights']
- action = 'edit/' + member['id']
-
- try:
- if self.formdata.get('user'):
- if self.formdata['rights'] in ['0', '1', '2', '3']:
action_taken = True
-
- UpdateDb("UPDATE `staff` SET `username` = '" + _mysql.escape_string(self.formdata['user']) + "', `rights` = " + self.formdata['rights'] + " WHERE `id` = " + member['id'] + " LIMIT 1")
- message = _('Staff member updated.')
- logAction(staff_account['username'], _('Updated staff account for %s') % self.formdata['user'])
+ message = '<a href="' + Settings.CGI_URL + 'manage/staff/delete_confirmed/' + \
+ path_split[4] + '">' + _(
+ 'Click here to confirm the deletion of that staff member') + '</a>'
template_filename = "message.html"
- except:
+ elif path_split[3] == 'delete_confirmed':
+ if not moderator:
+ return
+
+ try:
+ action_taken = True
+ member = FetchOne(
+ 'SELECT `username` FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
+ if member:
+ UpdateDb('DELETE FROM `staff` WHERE `id` = ' +
+ _mysql.escape_string(path_split[4]) + ' LIMIT 1')
+ message = 'Staff member deleted.'
+ template_filename = "message.html"
+ logAction(staff_account['username'], _(
+ 'Deleted staff account for %s') % member['username'])
+ else:
+ message = _(
+ 'Unable to locate a staff account with that ID.')
+ template_filename = "message.html"
+ except:
+ pass
+
+ if not action_taken:
+ staff = FetchAll('SELECT * FROM `staff` ORDER BY `rights`')
+ for member in staff:
+ if member['rights'] == '0':
+ member['rights'] = _('Super-administrator')
+ elif member['rights'] == '1':
+ member['rights'] = _('Administrator')
+ elif member['rights'] == '2':
+ member['rights'] = _('Developer')
+ elif member['rights'] == '3':
+ member['rights'] = _('Moderator')
+ if member['lastactive'] != '0':
+ member['lastactivestamp'] = member['lastactive']
+ member['lastactive'] = formatTimestamp(
+ member['lastactive'])
+ else:
+ member['lastactive'] = _('Never')
+ member['lastactivestamp'] = '0'
+ template_filename = "staff.html"
+ template_values = {'mode': 0, 'staff': staff}
+ elif path_split[2] == 'delete':
+ if not moderator:
+ return
+
+ do_ban = False
+ try:
+ if self.formdata['ban'] == 'true':
+ do_ban = True
+ except:
pass
- else:
- action = 'add'
- try:
- if self.formdata.get('user') and self.formdata.get('pass'):
- username_taken = FetchOne('SELECT * FROM `staff` WHERE `username` = \'' + _mysql.escape_string(self.formdata['user']) + '\' LIMIT 1')
- if not username_taken:
- if self.formdata['rights'] in ['0', '1', '2', '3']:
- action_taken = True
- pass_hash = genPasswdHash(self.formdata['pass'])
-
- InsertDb("INSERT INTO `staff` (`username`, `password`, `added`, `rights`) VALUES ('" + _mysql.escape_string(self.formdata['user']) + "', '" + _mysql.escape_string(pass_hash) + "', " + str(timestamp()) + ", " + self.formdata['rights'] + ")")
- message = _('Staff member added.')
- logAction(staff_account['username'], 'Added staff account for ' + self.formdata['user'])
-
- template_filename = "message.html"
- else:
- action_taken = True
- message = _('That username is already in use.')
- template_filename = "message.html"
- except:
- pass
-
- if not action_taken:
- action_taken = True
-
- if action == 'add':
- submit = 'Agregar'
- else:
- submit = 'Editar'
-
- template_filename = "staff.html"
- template_values = {'mode': 1,
- 'action': action,
- 'member': member,
- 'member_username': member_username,
- 'member_rights': member_rights,
- 'submit': submit}
- elif path_split[3] == 'delete':
- if not moderator:
- return
-
- action_taken = True
- message = '<a href="' + Settings.CGI_URL + 'manage/staff/delete_confirmed/' + path_split[4] + '">' + _('Click here to confirm the deletion of that staff member') + '</a>'
- template_filename = "message.html"
- elif path_split[3] == 'delete_confirmed':
- if not moderator:
- return
-
- try:
- action_taken = True
- member = FetchOne('SELECT `username` FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
- if member:
- UpdateDb('DELETE FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
- message = 'Staff member deleted.'
- template_filename = "message.html"
- logAction(staff_account['username'], _('Deleted staff account for %s') % member['username'])
- else:
- message = _('Unable to locate a staff account with that ID.')
+
+ template_filename = "delete.html"
+ template_values = {
+ 'do_ban': do_ban, 'curboard': path_split[3], 'postid': path_split[4]}
+ elif path_split[2] == 'delete_confirmed':
+ if not moderator:
+ return
+
+ do_ban = self.formdata.get('ban')
+ permanently = self.formdata.get('perma')
+ imageonly = self.formdata.get('imageonly')
+
+ board = setBoard(path_split[3])
+ postid = int(path_split[4])
+ post = FetchOne('SELECT id, message, parentid, INET_NTOA(ip) AS ip FROM posts WHERE boardid = %s AND id = %s' % (
+ board['id'], postid))
+
+ if not permanently:
+ deletePost(path_split[4], None, '2', imageonly)
+ else:
+ deletePost(path_split[4], None, '0', imageonly)
+ regenerateHome()
+
+                # delete any reports filed against this post
+ UpdateDb("DELETE FROM `reports` WHERE `postid` = '" +
+ _mysql.escape_string(path_split[4])+"'")
+ boards = FetchAll(
+ 'SELECT `name`, `dir` FROM `boards` ORDER BY `dir`')
+
+ if imageonly:
+ message = 'Archivo de post /%s/%s eliminado.' % (
+ board['dir'], post['id'])
+ elif permanently or post["parentid"] == '0':
+ message = 'Post /%s/%s eliminado permanentemente.' % (
+ board['dir'], post['id'])
+ else:
+ message = 'Post /%s/%s enviado a la papelera.' % (
+ board['dir'], post['id'])
template_filename = "message.html"
- except:
- pass
-
- if not action_taken:
- staff = FetchAll('SELECT * FROM `staff` ORDER BY `rights`')
- for member in staff:
- if member['rights'] == '0':
- member ['rights'] = _('Super-administrator')
- elif member['rights'] == '1':
- member ['rights'] = _('Administrator')
- elif member['rights'] == '2':
- member ['rights'] = _('Developer')
- elif member['rights'] == '3':
- member ['rights'] = _('Moderator')
- if member['lastactive'] != '0':
- member['lastactivestamp'] = member['lastactive']
- member['lastactive'] = formatTimestamp(member['lastactive'])
- else:
- member['lastactive'] = _('Never')
- member['lastactivestamp'] = '0'
- template_filename = "staff.html"
- template_values = {'mode': 0, 'staff': staff}
- elif path_split[2] == 'delete':
- if not moderator:
- return
-
- do_ban = False
- try:
- if self.formdata['ban'] == 'true':
- do_ban = True
- except:
- pass
-
- template_filename = "delete.html"
- template_values = {'do_ban': do_ban, 'curboard': path_split[3], 'postid': path_split[4]}
- elif path_split[2] == 'delete_confirmed':
- if not moderator:
- return
-
- do_ban = self.formdata.get('ban')
- permanently = self.formdata.get('perma')
- imageonly = self.formdata.get('imageonly')
-
- board = setBoard(path_split[3])
- postid = int(path_split[4])
- post = FetchOne('SELECT id, message, parentid, INET_NTOA(ip) AS ip FROM posts WHERE boardid = %s AND id = %s' % (board['id'], postid))
-
- if not permanently:
- deletePost(path_split[4], None, '2', imageonly)
- else:
- deletePost(path_split[4], None, '0', imageonly)
- regenerateHome()
-
- # Borrar denuncias
- UpdateDb("DELETE FROM `reports` WHERE `postid` = '"+_mysql.escape_string(path_split[4])+"'")
- boards = FetchAll('SELECT `name`, `dir` FROM `boards` ORDER BY `dir`')
-
- if imageonly:
- message = 'Archivo de post /%s/%s eliminado.' % (board['dir'], post['id'])
- elif permanently or post["parentid"] == '0':
- message = 'Post /%s/%s eliminado permanentemente.' % (board['dir'], post['id'])
- else:
- message = 'Post /%s/%s enviado a la papelera.' % (board['dir'], post['id'])
- template_filename = "message.html"
- logAction(staff_account['username'], message + ' Contenido: ' + post['message'] + ' IP: ' + post['ip'])
-
- if do_ban:
- message = _('Redirecting to ban page...') + '<meta http-equiv="refresh" content="0;url=' + Settings.CGI_URL + 'manage/ban?ip=' + post['ip'] + '" />'
- template_filename = "message.html"
- elif path_split[2] == 'lock':
- setLocked = 0
-
- # Nos vamos al board y ubicamos el post
- board = setBoard(path_split[3])
- post = FetchOne('SELECT `parentid`, `locked` FROM `posts` WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
- if not post:
- message = _('Unable to locate a post with that ID.')
- template_filename = "message.html"
- else:
- if post['parentid'] != '0':
- message = _('Post is not a thread opener.')
- template_filename = "message.html"
- else:
- if post['locked'] == '0':
- # Cerrar si esta abierto
- setLocked = 1
- else:
- # Abrir si esta cerrado
- setLocked = 0
-
- UpdateDb("UPDATE `posts` SET `locked` = %d WHERE `boardid` = '%s' AND `id` = '%s' LIMIT 1" % (setLocked, board["id"], _mysql.escape_string(path_split[4])))
- threadUpdated(path_split[4])
- if setLocked == 1:
- message = _('Thread successfully closed.')
- logAction(staff_account['username'], _('Closed thread %s') % ('/' + path_split[3] + '/' + path_split[4]))
- else:
- message = _('Thread successfully opened.')
- logAction(staff_account['username'], _('Opened thread %s') % ('/' + path_split[3] + '/' + path_split[4]))
- template_filename = "message.html"
- elif path_split[2] == 'permasage':
- setPermasaged = 0
-
- # Nos vamos al board y ubicamos el post
- board = setBoard(path_split[3])
- post = FetchOne('SELECT `parentid`, `locked` FROM `posts` WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
- if not post:
- message = 'Unable to locate a post with that ID.'
- template_filename = "message.html"
- elif post['locked'] == '1':
- message = 'Solo se puede aplicar permasage en un hilo abierto.'
- template_filename = "message.html"
- else:
- if post['parentid'] != '0':
- message = 'Post is not a thread opener.'
- template_filename = "message.html"
- else:
- if post['locked'] == '2':
- # Sacar permasage
- setPermasaged = 0
- else:
- # Colocar permasage
- setPermasaged = 2
-
- UpdateDb("UPDATE `posts` SET `locked` = %d WHERE `boardid` = '%s' AND `id` = '%s' LIMIT 1" % (setPermasaged, board["id"], _mysql.escape_string(path_split[4])))
- regenerateFrontPages()
- threadUpdated(path_split[4])
-
- if setPermasaged == 2:
- message = 'Thread successfully permasaged.'
- logAction(staff_account['username'], 'Enabled permasage in thread /' + path_split[3] + '/' + path_split[4])
- else:
- message = 'Thread successfully un-permasaged.'
- logAction(staff_account['username'], 'Disabled permasage in thread /' + path_split[3] + '/' + path_split[4])
- template_filename = "message.html"
- elif path_split[2] == 'move':
- if not moderator:
- return
-
- oldboardid = ""
- oldthread = ""
- newboardid = ""
- try:
- oldboardid = path_split[3]
- oldthread = path_split[4]
- newboardid = path_split[5]
- except:
- pass
+ logAction(staff_account['username'], message +
+ ' Contenido: ' + post['message'] + ' IP: ' + post['ip'])
- try:
- oldboardid = self.formdata['oldboardid']
- oldthread = self.formdata['oldthread']
- newboardid = self.formdata['newboardid']
- except:
- pass
-
- if oldboardid and oldthread and newboardid:
- message = "import"
- import shutil
- message += "ok"
-
- board = setBoard(oldboardid)
- oldboard = board['dir']
- oldboardsubject = board['subject']
- oldboardname = random.choice(board["anonymous"].split('|'))
-
- # get old posts
- posts = FetchAll("SELECT * FROM `posts` WHERE (`id` = {0} OR `parentid` = {0}) AND `boardid` = {1} ORDER BY id ASC".format(oldthread, board['id']))
-
- # switch to new board
- board = setBoard(newboardid)
- newboard = board['dir']
-
- refs = {}
- moved_files = []
- moved_thumbs = []
- moved_cats = []
- newthreadid = 0
- newthread = 0
- num = 1
-
- message = "from total: %s<br>" % len(posts)
- template_filename = "message.html"
-
- for p in posts:
- # save old post ID
- old_id = p['id']
- is_op = bool(p['parentid'] == '0')
-
- # copy post object but without ID and target boardid
- post = Post()
- post.post = p
- post.post.pop("id")
- post["boardid"] = board['id']
- post["parentid"] = newthreadid
-
- # save the files we need to move if any
- if post['IS_DELETED'] == '0':
- if post['file']:
- moved_files.append(post['file'])
- if post['thumb']:
- moved_thumbs.append(post['thumb'])
- if is_op:
- moved_cats.append(post['thumb'])
-
- # fix subject if necessary
- if post['subject'] and post['subject'] == oldboardsubject:
- post['subject'] = board['subject']
-
- # fix new default name
- if post['name'] == oldboardname:
- post['name'] = board['anonymous']
-
- # fix date and (re)add post ID if necessary
- post['timestamp_formatted'] = formatTimestamp(post['timestamp'])
- if board["useid"] != '0':
- if post["parentid"]:
- tym = parent_time
- else:
- tym = post["timestamp"]
- post['timestamp_formatted'] += ' ID:' + iphash(inet_ntoa(long(post['ip'])), post, tym, board["useid"], False, '', False, False, (board["countrycode"] in ['1', '2']))
-
- # insert new post and get its new ID
- new_id = post.insert()
-
- # save the reference (BBS = post number, IB = new ID)
- refs[old_id] = num if board['board_type'] == '1' else new_id
-
- # this was an OP
- message += "newthread = %s parentid = %s<br>" % (newthreadid, p['parentid'])
- if is_op:
- oldthread = old_id
- newthreadid = new_id
- oldbumped = post["bumped"]
-
- # BBS = new thread timestamp, IB = new thread ID
- newthread = post['timestamp'] if board['board_type'] == '1' else new_id
- parent_time = post['timestamp']
-
- # log it
- message += "%s -> %s<br>" % (old_id, new_id)
-
- num += 1
-
- # fix anchors
- for old, new in refs.iteritems():
- old_url = "/{oldboard}/res/{oldthread}.html#{oldpost}\">&gt;&gt;{oldpost}</a>".format(oldboard=oldboard, oldthread=oldthread, oldpost=old)
-
- if board['board_type'] == '1':
- new_url = "/{newboard}/read/{newthread}/{newpost}\">&gt;&gt;{newpost}</a>".format(newboard=newboard, newthread=newthread, newpost=new)
- else:
- new_url = "/{newboard}/res/{newthread}.html#{newpost}\">&gt;&gt;{newpost}</a>".format(newboard=newboard, newthread=newthread, newpost=new)
-
- sql = "UPDATE `posts` SET `message` = REPLACE(message, '{old}', '{new}') WHERE `boardid` = {newboardid} AND (`id` = {newthreadid} OR `parentid` = {newthreadid})".format(old=old_url, new=new_url, newboardid=board['id'], newthreadid=newthreadid)
- message += sql + "<br>"
- UpdateDb(sql)
-
- # copy files
- for file in moved_files:
- if not os.path.isfile(Settings.IMAGES_DIR + newboard + "/src/" + file):
- shutil.copyfile(Settings.IMAGES_DIR + oldboard + "/src/" + file, Settings.IMAGES_DIR + newboard + "/src/" + file)
- for thumb in moved_thumbs:
- if not os.path.isfile(Settings.IMAGES_DIR + newboard + "/thumb/" + thumb):
- shutil.copyfile(Settings.IMAGES_DIR + oldboard + "/thumb/" + thumb, Settings.IMAGES_DIR + newboard + "/thumb/" + thumb)
- if not os.path.isfile(Settings.IMAGES_DIR + newboard + "/mobile/" + thumb):
- shutil.copyfile(Settings.IMAGES_DIR + oldboard + "/mobile/" + thumb, Settings.IMAGES_DIR + newboard + "/mobile/" + thumb)
- for cat in moved_cats:
- try:
- if not os.path.isfile(Settings.IMAGES_DIR + newboard + "/cat/" + thumb):
- shutil.copyfile(Settings.IMAGES_DIR + oldboard + "/cat/" + thumb, Settings.IMAGES_DIR + newboard + "/cat/" + thumb)
- except:
- pass
-
- # lock original, set expiration to 1 day
- exp = timestamp()+86400
- exp_format = datetime.datetime.fromtimestamp(exp).strftime("%d/%m")
- sql = "UPDATE `posts` SET `locked`=1, `expires`={exp}, `expires_formatted`=\"{exp_format}\" WHERE `boardid`=\"{oldboard}\" AND id=\"{oldthread}\"".format(exp=exp,exp_format=exp_format,oldboard=oldboardid,oldthread=oldthread)
- UpdateDb(sql)
-
- # insert notice message
- if 'msg' in self.formdata:
- leavemsg = True
- board = setBoard(oldboard)
-
- if board['board_type'] == '1':
- thread_url = "/{newboard}/read/{newthread}".format(newboard=newboard, newthread=newthread)
- else:
- thread_url = "/{newboard}/res/{newthread}.html".format(newboard=newboard, newthread=newthread)
-
- notice_post = Post(board["id"])
- notice_post["parentid"] = oldthread
- if board['board_type'] == "0":
- notice_post["subject"] = "Aviso"
- notice_post["name"] = "Sistema"
- notice_post["message"] = "El hilo ha sido movido a <a href=\"{url}\">/{newboard}/{newthread}</a>.".format(url=thread_url, newboard=newboard, newthread=newthread)
- notice_post["timestamp"] = timestamp()+1
- notice_post["timestamp_formatted"] = "Hilo movido"
- notice_post["bumped"] = oldbumped
- notice_post.insert()
- regenerateFrontPages()
- regenerateThreadPage(oldthread)
-
- # regenerate again (fix?)
- board = setBoard(newboardid)
- regenerateFrontPages()
- regenerateThreadPage(newthreadid)
-
- message += "done"
-
- logAction(staff_account['username'], "Movido hilo %s/%s a %s/%s." % (oldboard, oldthread, newboard, newthread))
- else:
- template_filename = "move.html"
- template_values = {'boards': boardlist(), 'oldboardid': oldboardid, 'oldthread': oldthread}
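# Standalone sketch of the reference-remapping step in the move handler above:
# refs maps old post IDs to their IDs on the destination board, and every old
# ">>N" anchor is rewritten to the new thread/post URL.  Names and the sample
# markup are illustrative only.
def remap_anchors(message, refs, oldboard, oldthread, newboard, newthread):
    for old_id, new_id in refs.items():
        old_frag = '/%s/res/%s.html#%s">&gt;&gt;%s</a>' % (oldboard, oldthread, old_id, old_id)
        new_frag = '/%s/res/%s.html#%s">&gt;&gt;%s</a>' % (newboard, newthread, new_id, new_id)
        message = message.replace(old_frag, new_frag)
    return message

refs = {'101': '206'}
msg = '<a href="/foo/res/100.html#101">&gt;&gt;101</a>'
print(remap_anchors(msg, refs, 'foo', '100', 'bar', '205'))
# -> <a href="/bar/res/205.html#206">&gt;&gt;206</a>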
- elif path_split[2] == 'ban':
- if not moderator:
- return
-
- if len(path_split) > 4:
- board = setBoard(path_split[3])
- post = FetchOne('SELECT `ip` FROM `posts` WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
- formatted_ip = inet_ntoa(long(post['ip']))
- #Creo que esto no deberia ir aqui... -> UpdateDb('UPDATE `posts` SET `banned` = 1 WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\'')
- if not post:
- message = _('Unable to locate a post with that ID.')
- template_filename = "message.html"
- else:
- message = '<meta http-equiv="refresh" content="0;url=' + Settings.CGI_URL + 'manage/ban?ip=' + formatted_ip + '" />Espere...'
- template_filename = "message.html"
- else:
- #if path_split[3] == '':
- try:
- ip = self.formdata['ip']
- except:
- ip = ''
- try:
- netmask = insnetmask = self.formdata['netmask']
- if netmask == '255.255.255.255':
- insnetmask = ''
- except:
- netmask = instnetmask = ''
- #else:
- # ip = path_split[3]
- if ip != '':
- try:
- reason = self.formdata['reason']
- except:
- reason = None
- if reason is not None:
- if self.formdata['seconds'] != '0':
- until = str(timestamp() + int(self.formdata['seconds']))
- else:
- until = '0'
- where = ''
- if 'board_all' not in self.formdata.keys():
- where = []
- boards = FetchAll('SELECT `dir` FROM `boards`')
- for board in boards:
- keyname = 'board_' + board['dir']
- if keyname in self.formdata.keys():
- if self.formdata[keyname] == "1":
- where.append(board['dir'])
- if len(where) > 0:
- where = pickle.dumps(where)
+ if do_ban:
+ message = _('Redirecting to ban page...') + '<meta http-equiv="refresh" content="0;url=' + \
+ Settings.CGI_URL + 'manage/ban?ip=' + \
+ post['ip'] + '" />'
+ template_filename = "message.html"
+ elif path_split[2] == 'lock':
+ setLocked = 0
+
+                # go to the board and locate the post
+ board = setBoard(path_split[3])
+ post = FetchOne('SELECT `parentid`, `locked` FROM `posts` WHERE `boardid` = ' +
+ board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
+ if not post:
+ message = _('Unable to locate a post with that ID.')
+ template_filename = "message.html"
+ else:
+ if post['parentid'] != '0':
+ message = _('Post is not a thread opener.')
+ template_filename = "message.html"
+ else:
+ if post['locked'] == '0':
+                            # close it if it is open
+ setLocked = 1
+ else:
+                            # open it if it is closed
+ setLocked = 0
+
+ UpdateDb("UPDATE `posts` SET `locked` = %d WHERE `boardid` = '%s' AND `id` = '%s' LIMIT 1" % (
+ setLocked, board["id"], _mysql.escape_string(path_split[4])))
+ threadUpdated(path_split[4])
+ if setLocked == 1:
+ message = _('Thread successfully closed.')
+ logAction(staff_account['username'], _('Closed thread %s') % (
+ '/' + path_split[3] + '/' + path_split[4]))
+ else:
+ message = _('Thread successfully opened.')
+ logAction(staff_account['username'], _('Opened thread %s') % (
+ '/' + path_split[3] + '/' + path_split[4]))
+ template_filename = "message.html"
+ elif path_split[2] == 'permasage':
+ setPermasaged = 0
+
+                # go to the board and locate the post
+ board = setBoard(path_split[3])
+ post = FetchOne('SELECT `parentid`, `locked` FROM `posts` WHERE `boardid` = ' +
+ board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
+ if not post:
+ message = 'Unable to locate a post with that ID.'
+ template_filename = "message.html"
+ elif post['locked'] == '1':
+ message = 'Solo se puede aplicar permasage en un hilo abierto.'
+ template_filename = "message.html"
else:
- self.error(_("You must select where the ban shall be placed"))
- return
-
- if 'edit' in self.formdata.keys():
- UpdateDb("DELETE FROM `bans` WHERE `id` = '" + _mysql.escape_string(self.formdata['edit']) + "' LIMIT 1")
- else:
- ban = FetchOne("SELECT `id` FROM `bans` WHERE `ip` = '" + _mysql.escape_string(ip) + "' AND `boards` = '" + _mysql.escape_string(where) + "' LIMIT 1")
- if ban:
- self.error(_('There is already an identical ban for this IP.') + '<a href="'+Settings.CGI_URL+'manage/ban/' + ip + '?edit=' + ban['id']+'">' + _('Edit') + '</a>')
- return
-
- # Blind mode
- if 'blind' in self.formdata.keys() and self.formdata['blind'] == '1':
- blind = '1'
- else:
- blind = '0'
-
- # Banear sin mensaje
- InsertDb("INSERT INTO `bans` (`ip`, `netmask`, `boards`, `added`, `until`, `staff`, `reason`, `note`, `blind`) VALUES (INET_ATON('" + _mysql.escape_string(ip) + "') & INET_ATON('"+_mysql.escape_string(netmask)+"'), INET_ATON('"+_mysql.escape_string(insnetmask)+"'), '" + _mysql.escape_string(where) + "', " + str(timestamp()) + ", " + until + ", '" + _mysql.escape_string(staff_account['username']) + "', '" + _mysql.escape_string(self.formdata['reason']) + "', '" + _mysql.escape_string(self.formdata['note']) + "', '"+blind+"')")
-
- regenerateAccess()
- if 'edit' in self.formdata.keys():
- message = _('Ban successfully edited.')
- action = 'Edited ban for ' + ip
- else:
- message = _('Ban successfully placed.')
- action = 'Banned ' + ip
- if until != '0':
- action += ' until ' + formatTimestamp(until)
+ if post['parentid'] != '0':
+ message = 'Post is not a thread opener.'
+ template_filename = "message.html"
+ else:
+ if post['locked'] == '2':
+                            # Remove permasage
+ setPermasaged = 0
+ else:
+                            # Apply permasage
+ setPermasaged = 2
+
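+                    # locked = 2 flags the thread as permasaged, 0 removes the flag; rebuild the pages so it takes effect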
+ UpdateDb("UPDATE `posts` SET `locked` = %d WHERE `boardid` = '%s' AND `id` = '%s' LIMIT 1" % (
+ setPermasaged, board["id"], _mysql.escape_string(path_split[4])))
+ regenerateFrontPages()
+ threadUpdated(path_split[4])
+
+ if setPermasaged == 2:
+ message = 'Thread successfully permasaged.'
+ logAction(
+ staff_account['username'], 'Enabled permasage in thread /' + path_split[3] + '/' + path_split[4])
+ else:
+ message = 'Thread successfully un-permasaged.'
+ logAction(
+ staff_account['username'], 'Disabled permasage in thread /' + path_split[3] + '/' + path_split[4])
+ template_filename = "message.html"
+ elif path_split[2] == 'move':
+ if not moderator:
+ return
+
+ oldboardid = ""
+ oldthread = ""
+ newboardid = ""
+ try:
+ oldboardid = path_split[3]
+ oldthread = path_split[4]
+ newboardid = path_split[5]
+ except:
+ pass
+
+ try:
+ oldboardid = self.formdata['oldboardid']
+ oldthread = self.formdata['oldthread']
+ newboardid = self.formdata['newboardid']
+ except:
+ pass
+
+ if oldboardid and oldthread and newboardid:
+ message = "import"
+ import shutil
+ message += "ok"
+
+ board = setBoard(oldboardid)
+ oldboard = board['dir']
+ oldboardsubject = board['subject']
+ oldboardname = random.choice(board["anonymous"].split('|'))
+
+ # get old posts
+ posts = FetchAll(
+ "SELECT * FROM `posts` WHERE (`id` = {0} OR `parentid` = {0}) AND `boardid` = {1} ORDER BY id ASC".format(oldthread, board['id']))
+
+ # switch to new board
+ board = setBoard(newboardid)
+ newboard = board['dir']
+
+ refs = {}
+ moved_files = []
+ moved_thumbs = []
+ moved_cats = []
+ newthreadid = 0
+ newthread = 0
+ num = 1
+
+ message = "from total: %s<br>" % len(posts)
+ template_filename = "message.html"
+
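+                # Re-insert every post of the thread into the destination board, recording old ID -> new ID in refs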
+ for p in posts:
+ # save old post ID
+ old_id = p['id']
+ is_op = bool(p['parentid'] == '0')
+
+ # copy post object but without ID and target boardid
+ post = Post()
+ post.post = p
+ post.post.pop("id")
+ post["boardid"] = board['id']
+ post["parentid"] = newthreadid
+
+ # save the files we need to move if any
+ if post['IS_DELETED'] == '0':
+ if post['file']:
+ moved_files.append(post['file'])
+ if post['thumb']:
+ moved_thumbs.append(post['thumb'])
+ if is_op:
+ moved_cats.append(post['thumb'])
+
+ # fix subject if necessary
+ if post['subject'] and post['subject'] == oldboardsubject:
+ post['subject'] = board['subject']
+
+ # fix new default name
+ if post['name'] == oldboardname:
+ post['name'] = board['anonymous']
+
+ # fix date and (re)add post ID if necessary
+ post['timestamp_formatted'] = formatTimestamp(
+ post['timestamp'])
+ if board["useid"] != '0':
+ if post["parentid"]:
+ tym = parent_time
+ else:
+ tym = post["timestamp"]
+ post['timestamp_formatted'] += ' ID:' + iphash(inet_ntoa(long(
+ post['ip'])), post, tym, board["useid"], False, '', False, False, (board["countrycode"] in ['1', '2']))
+
+ # insert new post and get its new ID
+ new_id = post.insert()
+
+ # save the reference (BBS = post number, IB = new ID)
+ refs[old_id] = num if board['board_type'] == '1' else new_id
+
+ # this was an OP
+ message += "newthread = %s parentid = %s<br>" % (
+ newthreadid, p['parentid'])
+ if is_op:
+ oldthread = old_id
+ newthreadid = new_id
+ oldbumped = post["bumped"]
+
+ # BBS = new thread timestamp, IB = new thread ID
+ newthread = post['timestamp'] if board['board_type'] == '1' else new_id
+ parent_time = post['timestamp']
+
+ # log it
+ message += "%s -> %s<br>" % (old_id, new_id)
+
+ num += 1
+
+ # fix anchors
+ for old, new in refs.iteritems():
+ old_url = "/{oldboard}/res/{oldthread}.html#{oldpost}\">&gt;&gt;{oldpost}</a>".format(
+ oldboard=oldboard, oldthread=oldthread, oldpost=old)
+
+ if board['board_type'] == '1':
+ new_url = "/{newboard}/read/{newthread}/{newpost}\">&gt;&gt;{newpost}</a>".format(
+ newboard=newboard, newthread=newthread, newpost=new)
+ else:
+ new_url = "/{newboard}/res/{newthread}.html#{newpost}\">&gt;&gt;{newpost}</a>".format(
+ newboard=newboard, newthread=newthread, newpost=new)
+
+ sql = "UPDATE `posts` SET `message` = REPLACE(message, '{old}', '{new}') WHERE `boardid` = {newboardid} AND (`id` = {newthreadid} OR `parentid` = {newthreadid})".format(
+ old=old_url, new=new_url, newboardid=board['id'], newthreadid=newthreadid)
+ message += sql + "<br>"
+ UpdateDb(sql)
+
+ # copy files
+ for file in moved_files:
+ if not os.path.isfile(Settings.IMAGES_DIR + newboard + "/src/" + file):
+ shutil.copyfile(Settings.IMAGES_DIR + oldboard + "/src/" +
+ file, Settings.IMAGES_DIR + newboard + "/src/" + file)
+ for thumb in moved_thumbs:
+ if not os.path.isfile(Settings.IMAGES_DIR + newboard + "/thumb/" + thumb):
+ shutil.copyfile(Settings.IMAGES_DIR + oldboard + "/thumb/" +
+ thumb, Settings.IMAGES_DIR + newboard + "/thumb/" + thumb)
+ if not os.path.isfile(Settings.IMAGES_DIR + newboard + "/mobile/" + thumb):
+ shutil.copyfile(Settings.IMAGES_DIR + oldboard + "/mobile/" +
+ thumb, Settings.IMAGES_DIR + newboard + "/mobile/" + thumb)
+ for cat in moved_cats:
+ try:
+                            if not os.path.isfile(Settings.IMAGES_DIR + newboard + "/cat/" + cat):
+                                shutil.copyfile(Settings.IMAGES_DIR + oldboard + "/cat/" +
+                                                cat, Settings.IMAGES_DIR + newboard + "/cat/" + cat)
+ except:
+ pass
+
+ # lock original, set expiration to 1 day
+ exp = timestamp()+86400
+ exp_format = datetime.datetime.fromtimestamp(
+ exp).strftime("%d/%m")
+ sql = "UPDATE `posts` SET `locked`=1, `expires`={exp}, `expires_formatted`=\"{exp_format}\" WHERE `boardid`=\"{oldboard}\" AND id=\"{oldthread}\"".format(
+ exp=exp, exp_format=exp_format, oldboard=oldboardid, oldthread=oldthread)
+ UpdateDb(sql)
+
+ # insert notice message
+ if 'msg' in self.formdata:
+ leavemsg = True
+ board = setBoard(oldboard)
+
+ if board['board_type'] == '1':
+ thread_url = "/{newboard}/read/{newthread}".format(
+ newboard=newboard, newthread=newthread)
+ else:
+ thread_url = "/{newboard}/res/{newthread}.html".format(
+ newboard=newboard, newthread=newthread)
+
+ notice_post = Post(board["id"])
+ notice_post["parentid"] = oldthread
+ if board['board_type'] == "0":
+ notice_post["subject"] = "Aviso"
+ notice_post["name"] = "Sistema"
+ notice_post["message"] = "El hilo ha sido movido a <a href=\"{url}\">/{newboard}/{newthread}</a>.".format(
+ url=thread_url, newboard=newboard, newthread=newthread)
+ notice_post["timestamp"] = timestamp()+1
+ notice_post["timestamp_formatted"] = "Hilo movido"
+ notice_post["bumped"] = oldbumped
+ notice_post.insert()
+ regenerateFrontPages()
+ regenerateThreadPage(oldthread)
+
+ # regenerate again (fix?)
+ board = setBoard(newboardid)
+ regenerateFrontPages()
+ regenerateThreadPage(newthreadid)
+
+ message += "done"
+
+ logAction(staff_account['username'], "Movido hilo %s/%s a %s/%s." %
+ (oldboard, oldthread, newboard, newthread))
else:
- action += ' permanently'
- logAction(staff_account['username'], action)
- template_filename = 'message.html'
- else:
- startvalues = {'where': [],
- 'netmask': '255.255.255.255',
- 'reason': '',
- 'note': '',
- 'message': '(GET OUT)',
- 'seconds': '0',
- 'blind': '1'}
- edit_id = 0
- if 'edit' in self.formdata.keys():
- edit_id = self.formdata['edit']
- ban = FetchOne("SELECT `id`, INET_NTOA(`ip`) AS 'ip', CASE WHEN `netmask` IS NULL THEN '255.255.255.255' ELSE INET_NTOA(`netmask`) END AS 'netmask', boards, added, until, staff, reason, note, blind FROM `bans` WHERE `id` = '" + _mysql.escape_string(edit_id) + "' ORDER BY `added` DESC")
- if ban:
- if ban['boards'] == '':
- where = ''
- else:
- where = pickle.loads(ban['boards'])
- if ban['until'] == '0':
- until = 0
- else:
- until = int(ban['until']) - timestamp()
- startvalues = {'where': where,
- 'netmask': ban['netmask'],
- 'reason': ban['reason'],
- 'note': ban['note'],
- 'seconds': str(until),
- 'blind': ban['blind']
- }
+ template_filename = "move.html"
+ template_values = {'boards': boardlist(
+ ), 'oldboardid': oldboardid, 'oldthread': oldthread}
+ elif path_split[2] == 'ban':
+ if not moderator:
+ return
+
+ if len(path_split) > 4:
+ board = setBoard(path_split[3])
+ post = FetchOne('SELECT `ip` FROM `posts` WHERE `boardid` = ' +
+ board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
+                # I don't think this should go here... -> UpdateDb('UPDATE `posts` SET `banned` = 1 WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\'')
+                if not post:
+                    message = _('Unable to locate a post with that ID.')
+                    template_filename = "message.html"
+                else:
+                    # Only dereference the post once we know it exists
+                    formatted_ip = inet_ntoa(long(post['ip']))
+                    message = '<meta http-equiv="refresh" content="0;url=' + \
+                        Settings.CGI_URL + 'manage/ban?ip=' + formatted_ip + '" />Espere...'
+                    template_filename = "message.html"
else:
- edit_id = 0
-
- template_filename = "bans.html"
- template_values = {'mode': 1,
- 'boards': boardlist(),
- 'ip': ip,
- 'startvalues': startvalues,
- 'edit_id': edit_id}
- elif path_split[2] == 'bans':
- if not moderator:
- return
-
- action_taken = False
- if len(path_split) > 4:
- if path_split[3] == 'delete':
- ip = FetchOne("SELECT INET_NTOA(`ip`) AS 'ip' FROM `bans` WHERE `id` = '" + _mysql.escape_string(path_split[4]) + "' LIMIT 1", 0)[0]
- if ip != '':
- # Delete ban
- UpdateDb('DELETE FROM `bans` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
- regenerateAccess()
- message = _('Ban successfully deleted.')
- template_filename = "message.html"
- logAction(staff_account['username'], _('Deleted ban for %s') % ip)
- else:
- message = _('There was a problem while deleting that ban. It may have already been removed, or recently expired.')
- template_filename = "message.html"
-
- if not action_taken:
- bans = FetchAll("SELECT `id`, INET_NTOA(`ip`) AS 'ip', CASE WHEN `netmask` IS NULL THEN '255.255.255.255' ELSE INET_NTOA(`netmask`) END AS 'netmask', boards, added, until, staff, reason, note, blind FROM `bans` ORDER BY `added` DESC")
- if bans:
- for ban in bans:
- if ban['boards'] == '':
- ban['boards'] = _('All boards')
- else:
- where = pickle.loads(ban['boards'])
- if len(where) > 1:
- ban['boards'] = '/' + '/, /'.join(where) + '/'
+ # if path_split[3] == '':
+ try:
+ ip = self.formdata['ip']
+ except:
+ ip = ''
+ try:
+ netmask = insnetmask = self.formdata['netmask']
+ if netmask == '255.255.255.255':
+ insnetmask = ''
+ except:
+                    netmask = insnetmask = ''
+ # else:
+ # ip = path_split[3]
+ if ip != '':
+ try:
+ reason = self.formdata['reason']
+ except:
+ reason = None
+ if reason is not None:
+ if self.formdata['seconds'] != '0':
+ until = str(
+ timestamp() + int(self.formdata['seconds']))
+ else:
+ until = '0'
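+                        # An empty boards value applies the ban to every board; otherwise the selected dirs are stored pickled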
+ where = ''
+ if 'board_all' not in self.formdata.keys():
+ where = []
+ boards = FetchAll('SELECT `dir` FROM `boards`')
+ for board in boards:
+ keyname = 'board_' + board['dir']
+ if keyname in self.formdata.keys():
+ if self.formdata[keyname] == "1":
+ where.append(board['dir'])
+ if len(where) > 0:
+ where = pickle.dumps(where)
+ else:
+ self.error(
+ _("You must select where the ban shall be placed"))
+ return
+
+ if 'edit' in self.formdata.keys():
+ UpdateDb("DELETE FROM `bans` WHERE `id` = '" +
+ _mysql.escape_string(self.formdata['edit']) + "' LIMIT 1")
+ else:
+ ban = FetchOne("SELECT `id` FROM `bans` WHERE `ip` = '" + _mysql.escape_string(
+ ip) + "' AND `boards` = '" + _mysql.escape_string(where) + "' LIMIT 1")
+ if ban:
+ self.error(_('There is already an identical ban for this IP.') + '<a href="' +
+ Settings.CGI_URL+'manage/ban/' + ip + '?edit=' + ban['id']+'">' + _('Edit') + '</a>')
+ return
+
+ # Blind mode
+ if 'blind' in self.formdata.keys() and self.formdata['blind'] == '1':
+ blind = '1'
+ else:
+ blind = '0'
+
+                            # Ban without a message
+ InsertDb("INSERT INTO `bans` (`ip`, `netmask`, `boards`, `added`, `until`, `staff`, `reason`, `note`, `blind`) VALUES (INET_ATON('" + _mysql.escape_string(ip) + "') & INET_ATON('"+_mysql.escape_string(netmask)+"'), INET_ATON('"+_mysql.escape_string(insnetmask)+"'), '" +
+ _mysql.escape_string(where) + "', " + str(timestamp()) + ", " + until + ", '" + _mysql.escape_string(staff_account['username']) + "', '" + _mysql.escape_string(self.formdata['reason']) + "', '" + _mysql.escape_string(self.formdata['note']) + "', '"+blind+"')")
+
+ regenerateAccess()
+ if 'edit' in self.formdata.keys():
+ message = _('Ban successfully edited.')
+ action = 'Edited ban for ' + ip
+ else:
+ message = _('Ban successfully placed.')
+ action = 'Banned ' + ip
+ if until != '0':
+ action += ' until ' + \
+ formatTimestamp(until)
+ else:
+ action += ' permanently'
+ logAction(staff_account['username'], action)
+ template_filename = 'message.html'
+ else:
+ startvalues = {'where': [],
+ 'netmask': '255.255.255.255',
+ 'reason': '',
+ 'note': '',
+ 'message': '(GET OUT)',
+ 'seconds': '0',
+ 'blind': '1'}
+ edit_id = 0
+ if 'edit' in self.formdata.keys():
+ edit_id = self.formdata['edit']
+ ban = FetchOne("SELECT `id`, INET_NTOA(`ip`) AS 'ip', CASE WHEN `netmask` IS NULL THEN '255.255.255.255' ELSE INET_NTOA(`netmask`) END AS 'netmask', boards, added, until, staff, reason, note, blind FROM `bans` WHERE `id` = '" +
+ _mysql.escape_string(edit_id) + "' ORDER BY `added` DESC")
+ if ban:
+ if ban['boards'] == '':
+ where = ''
+ else:
+ where = pickle.loads(ban['boards'])
+ if ban['until'] == '0':
+ until = 0
+ else:
+ until = int(ban['until']) - timestamp()
+ startvalues = {'where': where,
+ 'netmask': ban['netmask'],
+ 'reason': ban['reason'],
+ 'note': ban['note'],
+ 'seconds': str(until),
+ 'blind': ban['blind']
+ }
+ else:
+ edit_id = 0
+
+ template_filename = "bans.html"
+ template_values = {'mode': 1,
+ 'boards': boardlist(),
+ 'ip': ip,
+ 'startvalues': startvalues,
+ 'edit_id': edit_id}
+ elif path_split[2] == 'bans':
+ if not moderator:
+ return
+
+ action_taken = False
+ if len(path_split) > 4:
+ if path_split[3] == 'delete':
+ ip = FetchOne("SELECT INET_NTOA(`ip`) AS 'ip' FROM `bans` WHERE `id` = '" +
+ _mysql.escape_string(path_split[4]) + "' LIMIT 1", 0)[0]
+ if ip != '':
+ # Delete ban
+ UpdateDb('DELETE FROM `bans` WHERE `id` = ' +
+ _mysql.escape_string(path_split[4]) + ' LIMIT 1')
+ regenerateAccess()
+ message = _('Ban successfully deleted.')
+ template_filename = "message.html"
+ logAction(staff_account['username'], _(
+ 'Deleted ban for %s') % ip)
+ else:
+ message = _(
+ 'There was a problem while deleting that ban. It may have already been removed, or recently expired.')
+ template_filename = "message.html"
+
+ if not action_taken:
+ bans = FetchAll(
+ "SELECT `id`, INET_NTOA(`ip`) AS 'ip', CASE WHEN `netmask` IS NULL THEN '255.255.255.255' ELSE INET_NTOA(`netmask`) END AS 'netmask', boards, added, until, staff, reason, note, blind FROM `bans` ORDER BY `added` DESC")
+ if bans:
+ for ban in bans:
+ if ban['boards'] == '':
+ ban['boards'] = _('All boards')
+ else:
+ where = pickle.loads(ban['boards'])
+ if len(where) > 1:
+ ban['boards'] = '/' + \
+ '/, /'.join(where) + '/'
+ else:
+ ban['boards'] = '/' + where[0] + '/'
+ ban['added'] = formatTimestamp(ban['added'])
+ if ban['until'] == '0':
+ ban['until'] = _('Does not expire')
+ else:
+ ban['until'] = formatTimestamp(ban['until'])
+ if ban['blind'] == '1':
+ ban['blind'] = 'Sí'
+ else:
+ ban['blind'] = 'No'
+ template_filename = "bans.html"
+ template_values = {'mode': 0, 'bans': bans}
+ elif path_split[2] == 'changepassword':
+ form_submitted = False
+ try:
+ if self.formdata['oldpassword'] != '' and self.formdata['newpassword'] != '' and self.formdata['newpassword2'] != '':
+ form_submitted = True
+ except:
+ pass
+ if form_submitted:
+ if verifyPasswd(staff_account['username'], self.formdata['oldpassword']):
+ if self.formdata['newpassword'] == self.formdata['newpassword2']:
+ UpdateDb('UPDATE `staff` SET `password` = \'' + genPasswdHash(
+ self.formdata['newpassword']) + '\' WHERE `id` = ' + staff_account['id'] + ' LIMIT 1')
+ message = _(
+ 'Password successfully changed. Please log out and log back in.')
+ template_filename = "message.html"
+ else:
+ message = _('Passwords did not match.')
+ template_filename = "message.html"
+ else:
+ message = _('Current password incorrect.')
+ template_filename = "message.html"
else:
- ban['boards'] = '/' + where[0] + '/'
- ban['added'] = formatTimestamp(ban['added'])
- if ban['until'] == '0':
- ban['until'] = _('Does not expire')
- else:
- ban['until'] = formatTimestamp(ban['until'])
- if ban['blind'] == '1':
- ban['blind'] = 'Sí'
- else:
- ban['blind'] = 'No'
- template_filename = "bans.html"
- template_values = {'mode': 0, 'bans': bans}
- elif path_split[2] == 'changepassword':
- form_submitted = False
- try:
- if self.formdata['oldpassword'] != '' and self.formdata['newpassword'] != '' and self.formdata['newpassword2'] != '':
- form_submitted = True
- except:
- pass
- if form_submitted:
- if verifyPasswd(staff_account['username'], self.formdata['oldpassword']):
- if self.formdata['newpassword'] == self.formdata['newpassword2']:
- UpdateDb('UPDATE `staff` SET `password` = \'' + genPasswdHash(self.formdata['newpassword']) + '\' WHERE `id` = ' + staff_account['id'] + ' LIMIT 1')
- message = _('Password successfully changed. Please log out and log back in.')
- template_filename = "message.html"
- else:
- message = _('Passwords did not match.')
- template_filename = "message.html"
- else:
- message = _('Current password incorrect.')
- template_filename = "message.html"
- else:
- template_filename = "changepassword.html"
- template_values = {}
- elif path_split[2] == 'board':
- if not administrator:
- return
-
- if len(path_split) > 3:
- board = setBoard(path_split[3])
- form_submitted = False
- try:
- if self.formdata['name'] != '':
- form_submitted = True
- except:
- pass
- if form_submitted:
- # Update board settings
- board['name'] = self.formdata['name']
- board['longname'] = self.formdata['longname']
- board['subname'] = self.formdata['subname']
- board['anonymous'] = self.formdata['anonymous']
- board['subject'] = self.formdata['subject']
- board['message'] = self.formdata['message']
- switchBoard(self.formdata['type'])
- board['board_type'] = self.formdata['type']
- board['useid'] = self.formdata['useid']
- board['slip'] = self.formdata['slip']
- board['countrycode'] = self.formdata['countrycode']
- if 'recyclebin' in self.formdata.keys():
- board['recyclebin'] = '1'
- else:
- board['recyclebin'] = '0'
- if 'disable_name' in self.formdata.keys():
- board['disable_name'] = '1'
- else:
- board['disable_name'] = '0'
- if 'disable_subject' in self.formdata.keys():
- board['disable_subject'] = '1'
- else:
- board['disable_subject'] = '0'
- if 'secret' in self.formdata.keys():
- board['secret'] = '1'
- else:
- board['secret'] = '0'
- if 'locked' in self.formdata.keys():
- board['locked'] = '1'
- else:
- board['locked'] = '0'
- board['postarea_desc'] = self.formdata['postarea_desc']
- if 'allow_noimage' in self.formdata.keys():
- board['allow_noimage'] = '1'
- else:
- board['allow_noimage'] = '0'
- if 'allow_images' in self.formdata.keys():
- board['allow_images'] = '1'
- else:
- board['allow_images'] = '0'
- if 'allow_image_replies' in self.formdata.keys():
- board['allow_image_replies'] = '1'
- else:
- board['allow_image_replies'] = '0'
- if 'allow_spoilers' in self.formdata.keys():
- board['allow_spoilers'] = '1'
- else:
- board['allow_spoilers'] = '0'
- if 'allow_oekaki' in self.formdata.keys():
- board['allow_oekaki'] = '1'
- else:
- board['allow_oekaki'] = '0'
- if 'archive' in self.formdata.keys():
- board['archive'] = '1'
- else:
- board['archive'] = '0'
- board['postarea_extra'] = self.formdata['postarea_extra']
- board['force_css'] = self.formdata['force_css']
-
- # Update file types
- UpdateDb("DELETE FROM `boards_filetypes` WHERE `boardid` = %s" % board['id'])
- for filetype in filetypelist():
- if 'filetype'+filetype['ext'] in self.formdata.keys():
- UpdateDb("INSERT INTO `boards_filetypes` VALUES (%s, %s)" % (board['id'], filetype['id']))
-
- try:
- board['numthreads'] = int(self.formdata['numthreads'])
- except:
- raise UserError, _("Max threads shown must be numeric.")
-
- try:
- board['numcont'] = int(self.formdata['numcont'])
- except:
- raise UserError, _("Max replies shown must be numeric.")
-
- try:
- board['numline'] = int(self.formdata['numline'])
- except:
- raise UserError, _("Max lines shown must be numeric.")
-
- try:
- board['thumb_px'] = int(self.formdata['thumb_px'])
- except:
- raise UserError, _("Max thumb dimensions must be numeric.")
-
- try:
- board['maxsize'] = int(self.formdata['maxsize'])
- except:
- raise UserError, _("Max size must be numeric.")
-
- try:
- board['maxage'] = int(self.formdata['maxage'])
- except:
- raise UserError, _("Max age must be numeric.")
-
- try:
- board['maxinactive'] = int(self.formdata['maxinactive'])
- except:
- raise UserError, _("Max inactivity must be numeric.")
-
- try:
- board['threadsecs'] = int(self.formdata['threadsecs'])
- except:
- raise UserError, _("Time between new threads must be numeric.")
-
- try:
- board['postsecs'] = int(self.formdata['postsecs'])
- except:
- raise UserError, _("Time between replies must be numeric.")
-
- updateBoardSettings()
- message = _('Board options successfully updated.') + ' <a href="'+Settings.CGI_URL+'manage/rebuild/'+board['dir']+'">'+_('Rebuild')+'</a>'
- template_filename = "message.html"
- logAction(staff_account['username'], _('Updated options for /%s/') % board['dir'])
- else:
- template_filename = "boardoptions.html"
- template_values = {'mode': 1, 'boardopts': board, 'filetypes': filetypelist(), 'supported_filetypes': board['filetypes_ext']}
- else:
- # List all boards
- template_filename = "boardoptions.html"
- template_values = {'mode': 0, 'boards': boardlist()}
- elif path_split[2] == 'recyclebin':
- if not administrator:
- return
-
- message = None
- if len(path_split) > 5:
- if path_split[4] == 'restore':
- board = setBoard(path_split[5])
-
- post = FetchOne('SELECT `parentid` FROM `posts` WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[6]) + '\' LIMIT 1')
- if not post:
- message = _('Unable to locate a post with that ID.') + '<br />'
- template_filename = "message.html"
- else:
- UpdateDb('UPDATE `posts` SET `IS_DELETED` = 0 WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[6]) + '\' LIMIT 1')
- if post['parentid'] != '0':
- threadUpdated(post['parentid'])
- else:
- regenerateFrontPages()
-
- message = _('Post successfully restored.')
- logAction(staff_account['username'], _('Restored post %s') % ('/' + path_split[5] + '/' + path_split[6]))
-
- if path_split[4] == 'delete':
- board = setBoard(path_split[5])
- post = FetchOne('SELECT id, parentid, message, INET_NTOA(ip) AS ip FROM `posts` WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[6]) + '\' LIMIT 1')
- if not post:
- message = _('Unable to locate a post with that ID.')
- else:
- deletePost(path_split[6], None)
-
- if post['parentid'] != '0':
- threadUpdated(post['parentid'])
- else:
- regenerateFrontPages()
-
- message = "Eliminado post %s permanentemente." % ('/' + board['dir'] + '/' + post['id'])
- logAction(staff_account['username'], message + ' Contenido: ' + post['message'] + ' IP: ' + post['ip'])
-
- # Delete more than 1 post
- if 'deleteall' in self.formdata.keys():
- return # TODO
- deleted = 0
- for key in self.formdata.keys():
- if key[:2] == '!i':
- dir = key[2:].split('/')[0] # Board where the post is
- postid = key[2:].split('/')[1] # Post to delete
-
- # Delete post start
- post = FetchOne('SELECT `parentid`, `dir` FROM `posts` INNER JOIN `boards` ON posts.boardid = boards.id WHERE `dir` = \'' + _mysql.escape_string(dir) + '\' AND posts.id = \'' + _mysql.escape_string(postid) + '\' LIMIT 1')
- if not post:
- message = _('Unable to locate a post with that ID.')
- else:
- board = setBoard(dir)
- deletePost(int(postid), None)
- if post['parentid'] != '0':
- threadUpdated(post['parentid'])
+ template_filename = "changepassword.html"
+ template_values = {}
+ elif path_split[2] == 'board':
+ if not administrator:
+ return
+
+ if len(path_split) > 3:
+ board = setBoard(path_split[3])
+ form_submitted = False
+ try:
+ if self.formdata['name'] != '':
+ form_submitted = True
+ except:
+ pass
+ if form_submitted:
+ # Update board settings
+ board['name'] = self.formdata['name']
+ board['longname'] = self.formdata['longname']
+ board['subname'] = self.formdata['subname']
+ board['anonymous'] = self.formdata['anonymous']
+ board['subject'] = self.formdata['subject']
+ board['message'] = self.formdata['message']
+ switchBoard(self.formdata['type'])
+ board['board_type'] = self.formdata['type']
+ board['useid'] = self.formdata['useid']
+ board['slip'] = self.formdata['slip']
+ board['countrycode'] = self.formdata['countrycode']
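+                        # Checkboxes only show up in formdata when ticked, so a missing key turns the option off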
+ if 'recyclebin' in self.formdata.keys():
+ board['recyclebin'] = '1'
+ else:
+ board['recyclebin'] = '0'
+ if 'disable_name' in self.formdata.keys():
+ board['disable_name'] = '1'
+ else:
+ board['disable_name'] = '0'
+ if 'disable_subject' in self.formdata.keys():
+ board['disable_subject'] = '1'
+ else:
+ board['disable_subject'] = '0'
+ if 'secret' in self.formdata.keys():
+ board['secret'] = '1'
+ else:
+ board['secret'] = '0'
+ if 'locked' in self.formdata.keys():
+ board['locked'] = '1'
+ else:
+ board['locked'] = '0'
+ board['postarea_desc'] = self.formdata['postarea_desc']
+ if 'allow_noimage' in self.formdata.keys():
+ board['allow_noimage'] = '1'
+ else:
+ board['allow_noimage'] = '0'
+ if 'allow_images' in self.formdata.keys():
+ board['allow_images'] = '1'
+ else:
+ board['allow_images'] = '0'
+ if 'allow_image_replies' in self.formdata.keys():
+ board['allow_image_replies'] = '1'
+ else:
+ board['allow_image_replies'] = '0'
+ if 'allow_spoilers' in self.formdata.keys():
+ board['allow_spoilers'] = '1'
+ else:
+ board['allow_spoilers'] = '0'
+ if 'allow_oekaki' in self.formdata.keys():
+ board['allow_oekaki'] = '1'
+ else:
+ board['allow_oekaki'] = '0'
+ if 'archive' in self.formdata.keys():
+ board['archive'] = '1'
+ else:
+ board['archive'] = '0'
+ board['postarea_extra'] = self.formdata['postarea_extra']
+ board['force_css'] = self.formdata['force_css']
+
+ # Update file types
+ UpdateDb(
+ "DELETE FROM `boards_filetypes` WHERE `boardid` = %s" % board['id'])
+ for filetype in filetypelist():
+ if 'filetype'+filetype['ext'] in self.formdata.keys():
+ UpdateDb("INSERT INTO `boards_filetypes` VALUES (%s, %s)" % (
+ board['id'], filetype['id']))
+
+ try:
+ board['numthreads'] = int(
+ self.formdata['numthreads'])
+ except:
+ raise UserError, _("Max threads shown must be numeric.")
+
+ try:
+ board['numcont'] = int(self.formdata['numcont'])
+ except:
+ raise UserError, _("Max replies shown must be numeric.")
+
+ try:
+ board['numline'] = int(self.formdata['numline'])
+ except:
+ raise UserError, _("Max lines shown must be numeric.")
+
+ try:
+ board['thumb_px'] = int(self.formdata['thumb_px'])
+ except:
+ raise UserError, _("Max thumb dimensions must be numeric.")
+
+ try:
+ board['maxsize'] = int(self.formdata['maxsize'])
+ except:
+ raise UserError, _("Max size must be numeric.")
+
+ try:
+ board['maxage'] = int(self.formdata['maxage'])
+ except:
+ raise UserError, _("Max age must be numeric.")
+
+ try:
+ board['maxinactive'] = int(
+ self.formdata['maxinactive'])
+ except:
+ raise UserError, _("Max inactivity must be numeric.")
+
+ try:
+ board['threadsecs'] = int(
+ self.formdata['threadsecs'])
+ except:
+ raise UserError, _("Time between new threads must be numeric.")
+
+ try:
+ board['postsecs'] = int(self.formdata['postsecs'])
+ except:
+ raise UserError, _("Time between replies must be numeric.")
+
+ updateBoardSettings()
+ message = _('Board options successfully updated.') + ' <a href="' + \
+ Settings.CGI_URL+'manage/rebuild/' + \
+ board['dir']+'">'+_('Rebuild')+'</a>'
+ template_filename = "message.html"
+ logAction(staff_account['username'], _(
+ 'Updated options for /%s/') % board['dir'])
+ else:
+ template_filename = "boardoptions.html"
+ template_values = {'mode': 1, 'boardopts': board, 'filetypes': filetypelist(
+ ), 'supported_filetypes': board['filetypes_ext']}
else:
- regenerateFrontPages()
- deleted += 1
- # Delete post end
-
- logAction(staff_account['username'], _('Permadeleted %s post(s).') % str(deleted))
- message = _('Permadeleted %s post(s).') % str(deleted)
-
- ## Start
- import math
- pagesize = float(Settings.RECYCLEBIN_POSTS_PER_PAGE)
-
- try:
- currentpage = int(path_split[3])
- except:
- currentpage = 0
-
- skip = False
- if 'type' in self.formdata.keys():
- type = int(self.formdata["type"])
- else:
- type = 0
-
- # Generate board list
- boards = FetchAll('SELECT `name`, `dir` FROM `boards` ORDER BY `dir`')
- for board in boards:
- if 'board' in self.formdata.keys() and self.formdata['board'] == board['dir']:
- board['checked'] = True
- else:
- board['checked'] = False
-
- # Get type filter
- if type != 0:
- type_condition = "= " + str(type)
- else:
- type_condition = "!= 0"
-
- # Table
- if 'board' in self.formdata.keys() and self.formdata['board'] != 'all':
- cboard = self.formdata['board']
- posts = FetchAll("SELECT posts.id, posts.timestamp, timestamp_formatted, IS_DELETED, INET_NTOA(posts.ip) as ip, posts.message, dir, boardid FROM `posts` INNER JOIN `boards` ON boardid = boards.id WHERE `dir` = '%s' AND IS_DELETED %s ORDER BY `timestamp` DESC LIMIT %d, %d" % (_mysql.escape_string(self.formdata['board']), _mysql.escape_string(type_condition), currentpage*pagesize, pagesize))
- try:
- totals = FetchOne("SELECT COUNT(id) FROM `posts` WHERE IS_DELETED %s AND `boardid` = %s" % (_mysql.escape_string(type_condition), _mysql.escape_string(posts[0]['boardid'])), 0)
- except:
- skip = True
- else:
- cboard = 'all'
- posts = FetchAll("SELECT posts.id, posts.timestamp, timestamp_formatted, IS_DELETED, posts.ip, posts.message, dir FROM `posts` INNER JOIN `boards` ON boardid = boards.id WHERE IS_DELETED %s ORDER BY `timestamp` DESC LIMIT %d, %d" % (_mysql.escape_string(type_condition), currentpage*pagesize, pagesize))
- totals = FetchOne("SELECT COUNT(id) FROM `posts` WHERE IS_DELETED %s" % _mysql.escape_string(type_condition), 0)
-
- template_filename = "recyclebin.html"
- template_values = {'message': message,
- 'type': type,
- 'boards': boards,
- 'skip': skip}
-
- if not skip:
- # Calculate number of pages
- total = int(totals[0])
- pages = int(math.ceil(total / pagesize))
-
- # Create delete form
- if 'board' in self.formdata.keys():
- board = self.formdata['board']
- else:
- board = None
-
- navigator = ''
- if currentpage > 0:
- navigator += '<a href="'+Settings.CGI_URL+'manage/recyclebin/'+str(currentpage-1)+'?type='+str(type)+'&amp;board='+cboard+'">&lt;</a> '
- else:
- navigator += '&lt; '
-
- for i in range(pages):
- if i != currentpage:
- navigator += '<a href="'+Settings.CGI_URL+'manage/recyclebin/'+str(i)+'?type='+str(type)+'&amp;board='+cboard+'">'+str(i)+'</a> '
- else:
- navigator += str(i)+' '
-
- if currentpage < (pages-1):
- navigator += '<a href="'+Settings.CGI_URL+'manage/recyclebin/'+str(currentpage+1)+'?type='+str(type)+'&amp;board='+cboard+'">&gt;</a> '
- else:
- navigator += '&gt; '
-
- template_values.update({'currentpage': currentpage,
- 'curboard': board,
- 'posts': posts,
- 'navigator': navigator})
- # End recyclebin
- elif path_split[2] == 'lockboard':
- if not administrator:
- return
-
- try:
- board_dir = path_split[3]
- except:
- board_dir = ''
-
- if board_dir == '':
- template_filename = "lockboard.html"
- template_values = {'boards': boardlist()}
- elif path_split[2] == 'boardlock':
- board = setBoard(path_split[3])
- if int(board['locked']):
- # Si esta cerrado... abrir
- board['locked'] = 0
- updateBoardSettings()
- message = _('Board opened successfully.')
- template_filename = "message.html"
- else:
- # Si esta abierta, cerrar
- board['locked'] = 1
- updateBoardSettings()
- message = _('Board closed successfully.')
- template_filename = "message.html"
- elif path_split[2] == 'addboard':
- if not administrator:
- return
-
- action_taken = False
- board_dir = ''
-
- try:
- if self.formdata['name'] != '':
- board_dir = self.formdata['dir']
- except:
- pass
-
- if board_dir != '':
- action_taken = True
- board_exists = FetchOne('SELECT * FROM `boards` WHERE `dir` = \'' + _mysql.escape_string(board_dir) + '\' LIMIT 1')
- if not board_exists:
- os.mkdir(Settings.ROOT_DIR + board_dir)
- os.mkdir(Settings.ROOT_DIR + board_dir + '/res')
- if not os.path.exists(Settings.IMAGES_DIR + board_dir):
- os.mkdir(Settings.IMAGES_DIR + board_dir)
- os.mkdir(Settings.IMAGES_DIR + board_dir + '/src')
- os.mkdir(Settings.IMAGES_DIR + board_dir + '/thumb')
- os.mkdir(Settings.IMAGES_DIR + board_dir + '/mobile')
- os.mkdir(Settings.IMAGES_DIR + board_dir + '/cat')
- if os.path.exists(Settings.ROOT_DIR + board_dir) and os.path.isdir(Settings.ROOT_DIR + board_dir):
- UpdateDb('INSERT INTO `boards` (`dir`, `name`) VALUES (\'' + _mysql.escape_string(board_dir) + '\', \'' + _mysql.escape_string(self.formdata['name']) + '\')')
- board = setBoard(board_dir)
- f = open(Settings.ROOT_DIR + board['dir'] + '/.htaccess', 'w')
- try:
- f.write('DirectoryIndex index.html')
- finally:
- f.close()
- regenerateFrontPages()
- message = _('Board added')
- template_filename = "message.html"
- logAction(staff_account['username'], _('Added board %s') % ('/' + board['dir'] + '/'))
- else:
- message = _('There was a problem while making the directories.')
- template_filename = "message.html"
- else:
- message = _('There is already a board with that directory.')
- template_filename = "message.html"
-
- if not action_taken:
- template_filename = "addboard.html"
- template_values = {}
- elif path_split[2] == 'trim':
- if not administrator:
- return
- board = setBoard(path_split[3])
- trimThreads()
- self.output = "done trimming"
- return
- elif path_split[2] == 'setexpires':
- board = setBoard(path_split[3])
- parentid = int(path_split[4])
- days = int(path_split[5])
- t = time.time()
-
- expires = int(t) + (days * 86400)
- date_format = '%d/%m'
- expires_formatted = datetime.datetime.fromtimestamp(expires).strftime(date_format)
-
- sql = "UPDATE posts SET expires = timestamp + (%s * 86400), expires_formatted = FROM_UNIXTIME((timestamp + (%s * 86400)), '%s') WHERE boardid = %s AND id = %s" % (str(days), str(days), date_format, board["id"], str(parentid))
- UpdateDb(sql)
-
- self.output = "done " + sql
- return
- elif path_split[2] == 'fixflood':
- if not administrator:
- return
- board = setBoard('zonavip')
- threads = FetchAll("SELECT * FROM posts WHERE boardid = %s AND parentid = 0 AND subject LIKE 'querido mod%%'" % board['id'])
- for thread in threads:
- self.output += "%s<br>" % thread['id']
- #deletePost(thread['id'], None)
- return
- elif path_split[2] == 'fixico':
- board = setBoard(path_split[3])
-
- threads = FetchAll("SELECT * FROM posts WHERE boardid = %s AND parentid = 0 AND message NOT LIKE '<img%%'" % board['id'])
- for t in threads:
- img_src = '<img src="%s" alt="ico" /><br />' % getRandomIco()
- newmessage = img_src + t["message"]
- #UpdateDb("UPDATE posts SET message = '%s' WHERE boardid = %s AND id = %s" % (_mysql.escape_string(newmessage), board['id'], t['id']))
-
- self.output = repr(threads)
- return
- elif path_split[2] == 'fixkako':
- board = setBoard(path_split[3])
-
- threads = FetchAll('SELECT * FROM archive WHERE boardid = %s ORDER BY timestamp DESC' % board['id'])
- for item in threads:
- t = time.time()
- self.output += item['timestamp'] + '<br />'
- fname = Settings.ROOT_DIR + board["dir"] + "/kako/" + str(item["timestamp"]) + ".json"
- if os.path.isfile(fname):
- import json
- with open(fname) as f:
- thread = json.load(f)
- thread['posts'] = [dict(zip(thread['keys'], row)) for row in thread['posts']]
- template_fname = "txt_archive.html"
-
- post_preview = cut_home_msg(thread['posts'][0]['message'], 0)
- page = renderTemplate("txt_archive.html", {"threads": [thread], "preview": post_preview}, False)
- with open(Settings.ROOT_DIR + board["dir"] + "/kako/" + str(thread['timestamp']) + ".html", "w") as f:
- f.write(page)
-
- self.output += 'done' + str(time.time() - t) + '<br />'
- else:
- self.output += 'El hilo no existe.<br />'
- elif path_split[2] == 'fixexpires':
- board = setBoard(path_split[3])
-
- if int(board["maxage"]):
- date_format = '%d/%m'
- date_format_y = '%m/%Y'
- if int(board["maxage"]) >= 365:
- date_format = date_format_y
- sql = "UPDATE posts SET expires = timestamp + (%s * 86400), expires_formatted = FROM_UNIXTIME((timestamp + (%s * 86400)), '%s') WHERE boardid = %s AND parentid = 0" % (board["maxage"], board["maxage"], date_format, board["id"])
- UpdateDb(sql)
-
- alert_time = int(round(int(board['maxage']) * Settings.MAX_AGE_ALERT))
- sql = "UPDATE posts SET expires_alert = CASE WHEN UNIX_TIMESTAMP() > (expires - %d*86400) THEN 1 ELSE 0 END WHERE boardid = %s AND parentid = 0" % (alert_time, board["id"])
- UpdateDb(sql)
- else:
- sql = "UPDATE posts SET expires = 0, expires_formatted = '', expires_alert = 0 WHERE boardid = %s AND parentid = 0" % (board["id"])
- UpdateDb(sql)
-
- self.output = "done"
- return
- elif path_split[2] == 'fixid':
- board = setBoard(path_split[3])
- posts = FetchAll('SELECT * FROM `posts` WHERE `boardid` = %s' % board['id'])
- self.output = "total: %d<br />" % len(posts)
- for post in posts:
- new_timestamp_formatted = formatTimestamp(post['timestamp'])
- tim = 0
- if board["useid"] != '0':
- new_timestamp_formatted += ' ID:' + iphash(post['ip'], '', tim, '1', False, False, False, '0')
- self.output += "%s - %s <br />" % (post['id'], new_timestamp_formatted)
- query = "UPDATE `posts` SET timestamp_formatted = '%s' WHERE boardid = '%s' AND id = '%s'" % (new_timestamp_formatted, board['id'], post['id'])
- UpdateDb(query)
- return
- elif path_split[2] == 'fixname':
- board = setBoard(path_split[3])
- #posts = FetchAll('SELECT * FROM `posts` WHERE `boardid` = %s' % board['id'])
- posts = FetchAll('SELECT * FROM `posts` WHERE `name` LIKE \'%s\'' % '%%')
- new_name = board['anonymous']
- self.output = new_name + "<br />"
- for post in posts:
- self.output += "%s<br />" % (post['id'])
- query = "UPDATE `posts` SET `name` = '%s' WHERE boardid = '%s' AND id = '%s'" % (new_name, board['id'], post['id'])
- UpdateDb(query)
- return
- elif path_split[2] == 'setsub':
- board = setBoard(path_split[3])
- thread = FetchOne('SELECT * FROM `posts` WHERE `parentid` = 0 AND `boardid` = %s' % board['id'])
- subject = str(path_split[4])
- self.output = subject + "->" + thread['id'] + "<br />"
- query = "UPDATE `posts` SET `subject` = '%s' WHERE boardid = '%s' AND id = '%s'" % (subject, board['id'], thread['id'])
- UpdateDb(query)
- return
- elif path_split[2] == 'fixlength':
- board = setBoard(path_split[3])
- threads = FetchAll('SELECT * FROM `posts` WHERE parentid = 0 AND `boardid` = %s' % board['id'])
- for t in threads:
- length = threadNumReplies(t['id'])
- UpdateDb('UPDATE posts SET length = %d WHERE boardid = %s AND id = %s' % (length, board['id'], t['id']))
-
- self.output='done'
- return
- elif path_split[2] == 'archive':
- t = time.time()
- board = setBoard(path_split[3])
- postid = int(path_split[4])
- archiveThread(postid)
- self.output = "todo ok %s" % str(time.time() - t)
- elif path_split[2] == 'filters':
- action_taken = False
- if len(path_split) > 3 and path_split[3] == 'add':
- if "add" in self.formdata.keys():
- edit_id = 0
- if 'edit' in self.formdata.keys():
- edit_id = int(self.formdata['edit'])
-
- # We decide what type of filter it is.
- # 0: Word / 1: Name/Trip
- filter_type = int(self.formdata["type"])
- filter_action = int(self.formdata["action"])
- filter_from = ''
- filter_tripcode = ''
-
- # I don't like pickles... oh well.
- where = ''
- if 'board_all' not in self.formdata.keys():
- where = []
- boards = FetchAll('SELECT `dir` FROM `boards`')
- for board in boards:
- keyname = 'board_' + board['dir']
- if keyname in self.formdata.keys():
- if self.formdata[keyname] == "1":
- where.append(board['dir'])
- if len(where) > 0:
- where = _mysql.escape_string(pickle.dumps(where))
- else:
- self.error(_("You must select what board the filter will affect"))
+ # List all boards
+ template_filename = "boardoptions.html"
+ template_values = {'mode': 0, 'boards': boardlist()}
+ elif path_split[2] == 'recyclebin':
+ if not administrator:
+ return
+
+ message = None
+ if len(path_split) > 5:
+ if path_split[4] == 'restore':
+ board = setBoard(path_split[5])
+
+ post = FetchOne('SELECT `parentid` FROM `posts` WHERE `boardid` = ' +
+ board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[6]) + '\' LIMIT 1')
+ if not post:
+ message = _(
+ 'Unable to locate a post with that ID.') + '<br />'
+ template_filename = "message.html"
+ else:
+ UpdateDb('UPDATE `posts` SET `IS_DELETED` = 0 WHERE `boardid` = ' +
+ board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[6]) + '\' LIMIT 1')
+ if post['parentid'] != '0':
+ threadUpdated(post['parentid'])
+ else:
+ regenerateFrontPages()
+
+ message = _('Post successfully restored.')
+ logAction(staff_account['username'], _('Restored post %s') % (
+ '/' + path_split[5] + '/' + path_split[6]))
+
+ if path_split[4] == 'delete':
+ board = setBoard(path_split[5])
+ post = FetchOne('SELECT id, parentid, message, INET_NTOA(ip) AS ip FROM `posts` WHERE `boardid` = ' +
+ board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[6]) + '\' LIMIT 1')
+ if not post:
+ message = _(
+ 'Unable to locate a post with that ID.')
+ else:
+ deletePost(path_split[6], None)
+
+ if post['parentid'] != '0':
+ threadUpdated(post['parentid'])
+ else:
+ regenerateFrontPages()
+
+ message = "Eliminado post %s permanentemente." % (
+ '/' + board['dir'] + '/' + post['id'])
+ logAction(
+ staff_account['username'], message + ' Contenido: ' + post['message'] + ' IP: ' + post['ip'])
+
+ # Delete more than 1 post
+ if 'deleteall' in self.formdata.keys():
+ return # TODO
+ deleted = 0
+ for key in self.formdata.keys():
+ if key[:2] == '!i':
+ # Board where the post is
+ dir = key[2:].split('/')[0]
+ postid = key[2:].split('/')[1] # Post to delete
+
+ # Delete post start
+ post = FetchOne('SELECT `parentid`, `dir` FROM `posts` INNER JOIN `boards` ON posts.boardid = boards.id WHERE `dir` = \'' +
+ _mysql.escape_string(dir) + '\' AND posts.id = \'' + _mysql.escape_string(postid) + '\' LIMIT 1')
+ if not post:
+ message = _(
+ 'Unable to locate a post with that ID.')
+ else:
+ board = setBoard(dir)
+ deletePost(int(postid), None)
+ if post['parentid'] != '0':
+ threadUpdated(post['parentid'])
+ else:
+ regenerateFrontPages()
+ deleted += 1
+ # Delete post end
+
+ logAction(staff_account['username'], _(
+ 'Permadeleted %s post(s).') % str(deleted))
+ message = _('Permadeleted %s post(s).') % str(deleted)
+
+ # Start
+ import math
+ pagesize = float(Settings.RECYCLEBIN_POSTS_PER_PAGE)
+
+ try:
+ currentpage = int(path_split[3])
+ except:
+ currentpage = 0
+
+ skip = False
+ if 'type' in self.formdata.keys():
+ type = int(self.formdata["type"])
+ else:
+ type = 0
+
+ # Generate board list
+ boards = FetchAll(
+ 'SELECT `name`, `dir` FROM `boards` ORDER BY `dir`')
+ for board in boards:
+ if 'board' in self.formdata.keys() and self.formdata['board'] == board['dir']:
+ board['checked'] = True
+ else:
+ board['checked'] = False
+
+ # Get type filter
+ if type != 0:
+ type_condition = "= " + str(type)
+ else:
+ type_condition = "!= 0"
+
+ # Table
+ if 'board' in self.formdata.keys() and self.formdata['board'] != 'all':
+ cboard = self.formdata['board']
+ posts = FetchAll("SELECT posts.id, posts.timestamp, timestamp_formatted, IS_DELETED, INET_NTOA(posts.ip) as ip, posts.message, dir, boardid FROM `posts` INNER JOIN `boards` ON boardid = boards.id WHERE `dir` = '%s' AND IS_DELETED %s ORDER BY `timestamp` DESC LIMIT %d, %d" % (
+ _mysql.escape_string(self.formdata['board']), _mysql.escape_string(type_condition), currentpage*pagesize, pagesize))
+ try:
+ totals = FetchOne("SELECT COUNT(id) FROM `posts` WHERE IS_DELETED %s AND `boardid` = %s" % (
+ _mysql.escape_string(type_condition), _mysql.escape_string(posts[0]['boardid'])), 0)
+ except:
+ skip = True
+ else:
+ cboard = 'all'
+ posts = FetchAll("SELECT posts.id, posts.timestamp, timestamp_formatted, IS_DELETED, posts.ip, posts.message, dir FROM `posts` INNER JOIN `boards` ON boardid = boards.id WHERE IS_DELETED %s ORDER BY `timestamp` DESC LIMIT %d, %d" % (
+ _mysql.escape_string(type_condition), currentpage*pagesize, pagesize))
+ totals = FetchOne("SELECT COUNT(id) FROM `posts` WHERE IS_DELETED %s" %
+ _mysql.escape_string(type_condition), 0)
+
+ template_filename = "recyclebin.html"
+ template_values = {'message': message,
+ 'type': type,
+ 'boards': boards,
+ 'skip': skip}
+
+ if not skip:
+ # Calculate number of pages
+ total = int(totals[0])
+ pages = int(math.ceil(total / pagesize))
+
+ # Create delete form
+ if 'board' in self.formdata.keys():
+ board = self.formdata['board']
+ else:
+ board = None
+
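+                    # Build the < 0 1 2 ... > page navigator, keeping the current type and board filters in the links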
+ navigator = ''
+ if currentpage > 0:
+ navigator += '<a href="'+Settings.CGI_URL+'manage/recyclebin/' + \
+ str(currentpage-1)+'?type='+str(type) + \
+ '&amp;board='+cboard+'">&lt;</a> '
+ else:
+ navigator += '&lt; '
+
+ for i in range(pages):
+ if i != currentpage:
+ navigator += '<a href="'+Settings.CGI_URL+'manage/recyclebin/' + \
+ str(i)+'?type='+str(type)+'&amp;board=' + \
+ cboard+'">'+str(i)+'</a> '
+ else:
+ navigator += str(i)+' '
+
+ if currentpage < (pages-1):
+ navigator += '<a href="'+Settings.CGI_URL+'manage/recyclebin/' + \
+ str(currentpage+1)+'?type='+str(type) + \
+ '&amp;board='+cboard+'">&gt;</a> '
+ else:
+ navigator += '&gt; '
+
+ template_values.update({'currentpage': currentpage,
+ 'curboard': board,
+ 'posts': posts,
+ 'navigator': navigator})
+ # End recyclebin
+ elif path_split[2] == 'lockboard':
+ if not administrator:
+ return
+
+ try:
+ board_dir = path_split[3]
+ except:
+ board_dir = ''
+
+ if board_dir == '':
+ template_filename = "lockboard.html"
+ template_values = {'boards': boardlist()}
+ elif path_split[2] == 'boardlock':
+ board = setBoard(path_split[3])
+ if int(board['locked']):
+                # If it is closed... open it
+ board['locked'] = 0
+ updateBoardSettings()
+ message = _('Board opened successfully.')
+ template_filename = "message.html"
+ else:
+                # If it is open, close it
+ board['locked'] = 1
+ updateBoardSettings()
+ message = _('Board closed successfully.')
+ template_filename = "message.html"
+ elif path_split[2] == 'addboard':
+ if not administrator:
+ return
+
+ action_taken = False
+ board_dir = ''
+
+ try:
+ if self.formdata['name'] != '':
+ board_dir = self.formdata['dir']
+ except:
+ pass
+
+ if board_dir != '':
+ action_taken = True
+ board_exists = FetchOne(
+ 'SELECT * FROM `boards` WHERE `dir` = \'' + _mysql.escape_string(board_dir) + '\' LIMIT 1')
+ if not board_exists:
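+                    # Create the board directory plus the src/thumb/mobile/cat image subdirectories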
+ os.mkdir(Settings.ROOT_DIR + board_dir)
+ os.mkdir(Settings.ROOT_DIR + board_dir + '/res')
+ if not os.path.exists(Settings.IMAGES_DIR + board_dir):
+ os.mkdir(Settings.IMAGES_DIR + board_dir)
+ os.mkdir(Settings.IMAGES_DIR + board_dir + '/src')
+ os.mkdir(Settings.IMAGES_DIR + board_dir + '/thumb')
+ os.mkdir(Settings.IMAGES_DIR + board_dir + '/mobile')
+ os.mkdir(Settings.IMAGES_DIR + board_dir + '/cat')
+ if os.path.exists(Settings.ROOT_DIR + board_dir) and os.path.isdir(Settings.ROOT_DIR + board_dir):
+ UpdateDb('INSERT INTO `boards` (`dir`, `name`) VALUES (\'' + _mysql.escape_string(
+ board_dir) + '\', \'' + _mysql.escape_string(self.formdata['name']) + '\')')
+ board = setBoard(board_dir)
+ f = open(Settings.ROOT_DIR +
+ board['dir'] + '/.htaccess', 'w')
+ try:
+ f.write('DirectoryIndex index.html')
+ finally:
+ f.close()
+ regenerateFrontPages()
+ message = _('Board added')
+ template_filename = "message.html"
+ logAction(staff_account['username'], _(
+ 'Added board %s') % ('/' + board['dir'] + '/'))
+ else:
+ message = _(
+ 'There was a problem while making the directories.')
+ template_filename = "message.html"
+ else:
+ message = _(
+ 'There is already a board with that directory.')
+ template_filename = "message.html"
+
+ if not action_taken:
+ template_filename = "addboard.html"
+ template_values = {}
+ elif path_split[2] == 'trim':
+ if not administrator:
+ return
+ board = setBoard(path_split[3])
+ trimThreads()
+ self.output = "done trimming"
return
-
- if filter_type == 0:
- # Word filter
- if len(self.formdata["word"]) > 0:
- filter_from = _mysql.escape_string(cgi.escape(self.formdata["word"]))
- else:
- self.error(_("You must enter a word."))
+ elif path_split[2] == 'setexpires':
+ board = setBoard(path_split[3])
+ parentid = int(path_split[4])
+ days = int(path_split[5])
+ t = time.time()
+
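+            # Recompute the expiration timestamp and its formatted label directly in SQL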
+ expires = int(t) + (days * 86400)
+ date_format = '%d/%m'
+ expires_formatted = datetime.datetime.fromtimestamp(
+ expires).strftime(date_format)
+
+ sql = "UPDATE posts SET expires = timestamp + (%s * 86400), expires_formatted = FROM_UNIXTIME((timestamp + (%s * 86400)), '%s') WHERE boardid = %s AND id = %s" % (
+ str(days), str(days), date_format, board["id"], str(parentid))
+ UpdateDb(sql)
+
+ self.output = "done " + sql
return
- elif filter_type == 1:
- # Name/trip filter
- can_add = False
- if len(self.formdata["name"]) > 0:
- filter_from = _mysql.escape_string(self.formdata["name"])
- can_add = True
- if len(self.formdata["trip"]) > 0:
- filter_tripcode = _mysql.escape_string(self.formdata["trip"])
- can_add = True
- if not can_add:
- self.error(_("You must enter a name and/or a tripcode."))
+ elif path_split[2] == 'fixflood':
+ if not administrator:
+ return
+ board = setBoard('zonavip')
+ threads = FetchAll(
+ "SELECT * FROM posts WHERE boardid = %s AND parentid = 0 AND subject LIKE 'querido mod%%'" % board['id'])
+ for thread in threads:
+ self.output += "%s<br>" % thread['id']
+ #deletePost(thread['id'], None)
return
-
- # Action
- sql_query = ''
- filter_reason = ''
- if len(self.formdata["reason"]) > 0:
- filter_reason = _mysql.escape_string(self.formdata["reason"])
- if filter_action == 0:
- # Cancel post
- sql_query = "INSERT INTO `filters` (`id`, `boards`, `type`, `action`, `from`, `from_trip`, `reason`, `added`, `staff`) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % \
- (edit_id, where, str(filter_type), str(filter_action), filter_from, filter_tripcode, filter_reason, str(timestamp()), _mysql.escape_string(staff_account['username']))
- elif filter_action == 1:
- # Change to
- if len(self.formdata["changeto"]) > 0:
- filter_to = _mysql.escape_string(self.formdata["changeto"])
- sql_query = "INSERT INTO `filters` (`id`, `boards`, `type`, `action`, `from`, `from_trip`, `reason`, `to`, `added`, `staff`) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % \
- (edit_id, where, str(filter_type), str(filter_action), filter_from, filter_tripcode, filter_reason, filter_to, str(timestamp()), _mysql.escape_string(staff_account['username']))
- else:
- self.error(_("You must enter a word to change to."))
+ elif path_split[2] == 'fixico':
+ board = setBoard(path_split[3])
+
+ threads = FetchAll(
+ "SELECT * FROM posts WHERE boardid = %s AND parentid = 0 AND message NOT LIKE '<img%%'" % board['id'])
+ for t in threads:
+ img_src = '<img src="%s" alt="ico" /><br />' % getRandomIco()
+ newmessage = img_src + t["message"]
+ #UpdateDb("UPDATE posts SET message = '%s' WHERE boardid = %s AND id = %s" % (_mysql.escape_string(newmessage), board['id'], t['id']))
+
+ self.output = repr(threads)
return
- elif filter_action == 2:
- # Ban
- filter_seconds = '0'
- if len(self.formdata["seconds"]) > 0:
- filter_seconds = _mysql.escape_string(self.formdata["seconds"])
- if "blind" in self.formdata.keys() and self.formdata["blind"] == '1':
- filter_blind = '1'
- else:
- filter_blind = '2'
-
- sql_query = "INSERT INTO `filters` (`id`, `boards`, `type`, `action`, `from`, `from_trip`, `reason`, `seconds`, `blind`, `added`, `staff`) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % \
- (edit_id, where, str(filter_type), str(filter_action), filter_from, filter_tripcode, filter_reason, filter_seconds, filter_blind, str(timestamp()), _mysql.escape_string(staff_account['username']))
- elif filter_action == 3:
- # Redirect URL
- if len(self.formdata['redirect_url']) > 0:
- redirect_url = _mysql.escape_string(self.formdata['redirect_url'])
- redirect_time = 0
- try:
- redirect_time = int(self.formdata['redirect_time'])
- except:
- pass
- else:
- self.error(_("You must enter a URL to redirect to."))
+ elif path_split[2] == 'fixkako':
+ board = setBoard(path_split[3])
+
+ threads = FetchAll(
+ 'SELECT * FROM archive WHERE boardid = %s ORDER BY timestamp DESC' % board['id'])
+ for item in threads:
+ t = time.time()
+ self.output += item['timestamp'] + '<br />'
+ fname = Settings.ROOT_DIR + \
+ board["dir"] + "/kako/" + \
+ str(item["timestamp"]) + ".json"
+ if os.path.isfile(fname):
+ import json
+ with open(fname) as f:
+ thread = json.load(f)
+ thread['posts'] = [
+ dict(zip(thread['keys'], row)) for row in thread['posts']]
+ template_fname = "txt_archive.html"
+
+ post_preview = cut_home_msg(
+ thread['posts'][0]['message'], 0)
+ page = renderTemplate("txt_archive.html", {"threads": [
+ thread], "preview": post_preview}, False)
+ with open(Settings.ROOT_DIR + board["dir"] + "/kako/" + str(thread['timestamp']) + ".html", "w") as f:
+ f.write(page)
+
+ self.output += 'done' + str(time.time() - t) + '<br />'
+ else:
+ self.output += 'El hilo no existe.<br />'
+ elif path_split[2] == 'fixexpires':
+ board = setBoard(path_split[3])
+
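+            # Boards with a maxage get their expiry columns recalculated; boards without one get them cleared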
+ if int(board["maxage"]):
+ date_format = '%d/%m'
+ date_format_y = '%m/%Y'
+ if int(board["maxage"]) >= 365:
+ date_format = date_format_y
+ sql = "UPDATE posts SET expires = timestamp + (%s * 86400), expires_formatted = FROM_UNIXTIME((timestamp + (%s * 86400)), '%s') WHERE boardid = %s AND parentid = 0" % (
+ board["maxage"], board["maxage"], date_format, board["id"])
+ UpdateDb(sql)
+
+ alert_time = int(
+ round(int(board['maxage']) * Settings.MAX_AGE_ALERT))
+ sql = "UPDATE posts SET expires_alert = CASE WHEN UNIX_TIMESTAMP() > (expires - %d*86400) THEN 1 ELSE 0 END WHERE boardid = %s AND parentid = 0" % (alert_time,
+ board["id"])
+ UpdateDb(sql)
+ else:
+ sql = "UPDATE posts SET expires = 0, expires_formatted = '', expires_alert = 0 WHERE boardid = %s AND parentid = 0" % (
+ board["id"])
+ UpdateDb(sql)
+
+ self.output = "done"
return
-
- sql_query = "INSERT INTO `filters` (`id`, `boards`, `type`, `action`, `from`, `from_trip`, `reason`, `redirect_url`, `redirect_time`, `added`, `staff`) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % \
- (edit_id, where, str(filter_type), str(filter_action), filter_from, filter_tripcode, filter_reason, redirect_url, str(redirect_time), str(timestamp()), _mysql.escape_string(staff_account['username']))
- # DO QUERY!
- if edit_id > 0:
- UpdateDb("DELETE FROM `filters` WHERE `id` = %s" % str(edit_id))
- UpdateDb(sql_query)
- message = 'Filter edited.'
- else:
- filt = FetchOne("SELECT `id` FROM `filters` WHERE `boards` = '%s' AND `type` = '%s' AND `from` = '%s'" % (where, str(filter_type), filter_from))
- if not filt:
- UpdateDb(sql_query)
- message = 'Filter added.'
- else:
- message = 'This filter already exists here:' + ' <a href="'+Settings.CGI_URL+'manage/filters/add?edit='+filt['id']+'">edit</a>'
- action_taken = True
- template_filename = "message.html"
- else:
- # Create add form
- edit_id = 0
- if 'edit' in self.formdata.keys() and int(self.formdata['edit']) > 0:
- # Load values
- edit_id = int(self.formdata['edit'])
- filt = FetchOne("SELECT * FROM `filters` WHERE `id` = %s LIMIT 1" % str(edit_id))
- if filt['boards'] == '':
- where = ''
- else:
- where = pickle.loads(filt['boards'])
- startvalues = {'type': filt['type'],
- 'trip': filt['from_trip'],
- 'where': where,
- 'action': filt['action'],
- 'changeto': cgi.escape(filt['to'], True),
- 'reason': filt['reason'],
- 'seconds': filt['seconds'],
- 'blind': filt['blind'],
- 'redirect_url': filt['redirect_url'],
- 'redirect_time': filt['redirect_time'],}
- if filt['type'] == '1':
- startvalues['name'] = filt['from']
- startvalues['word'] = ''
- else:
- startvalues['name'] = ''
- startvalues['word'] = filt['from']
- else:
- startvalues = {'type': '0',
- 'word': '',
- 'name': '',
- 'trip': '',
- 'where': [],
- 'action': '0',
- 'changeto': '',
- 'reason': _('Forbidden word'),
- 'seconds': '0',
- 'blind': '1',
- 'redirect_url': 'http://',
- 'redirect_time': '5'}
-
- if edit_id > 0:
- submit = "Editar Filtro"
- else:
- submit = "Agregar filtro"
-
- action_taken = True
- template_filename = "filters.html"
- template_values = {'mode': 1,
- 'edit_id': edit_id,
- 'boards': boardlist(),
- 'startvalues': startvalues,
- 'submit': submit}
- elif len(path_split) > 4 and path_split[3] == 'delete':
- delid = int(path_split[4])
- UpdateDb("DELETE FROM `filters` WHERE id = '%s' LIMIT 1" % str(delid))
- message = _('Deleted filter %s.') % str(delid)
- template_filename = "message.html"
- action_taken = True
-
- if not action_taken:
- filters = FetchAll("SELECT * FROM `filters` ORDER BY `added` DESC")
- for filter in filters:
- if filter['boards'] == '':
- filter['boards'] = _('All boards')
- else:
- where = pickle.loads(filter['boards'])
- if len(where) > 1:
- filter['boards'] = '/' + '/, /'.join(where) + '/'
- else:
- filter['boards'] = '/' + where[0] + '/'
- if filter['type'] == '0':
- filter['type_formatted'] = _('Word:') + ' <b>' + cgi.escape(filter['from']) + '</b>'
- elif filter['type'] == '1':
- filter['type_formatted'] = _('Name/Tripcode:')+' '
- if filter['from'] != '':
- filter['type_formatted'] += '<b class="name">' + filter['from'] + '</b>'
- if filter['from_trip'] != '':
- filter['type_formatted'] += '<span class="trip">' + filter['from_trip'] + '</span>'
- else:
- filter['type_formatted'] = '?'
- if filter['action'] == '0':
- filter ['action_formatted'] = _('Abort post')
- elif filter['action'] == '1':
- filter ['action_formatted'] = _('Change to:') + ' <b>' + cgi.escape(filter['to']) + '</b>'
- elif filter['action'] == '2':
- if filter['blind'] == '1':
- blind = _('Yes')
- else:
- blind = _('No')
- filter ['action_formatted'] = _('Autoban:') + '<br />' + \
- (_('Length:')+' <i>%s</i><br />'+_('Blind:')+' <i>%s</i>') % (filter['seconds'], blind)
- elif filter['action'] == '3':
- filter ['action_formatted'] = (_('Redirect to:')+' %s ('+_('in %s secs')+')') % (filter['redirect_url'], filter['redirect_time'])
- else:
- filter ['action_formatted'] = '?'
- filter['added'] = formatTimestamp(filter['added'])
-
- template_filename = "filters.html"
- template_values = {'mode': 0, 'filters': filters}
- elif path_split[2] == 'logs':
- if staff_account['rights'] != '0' and staff_account['rights'] != '2':
- return
-
- logs = FetchAll('SELECT * FROM `logs` ORDER BY `timestamp` DESC')
- for log in logs:
- log['timestamp_formatted'] = formatTimestamp(log['timestamp'])
- template_filename = "logs.html"
- template_values = {'logs': logs}
- elif path_split[2] == 'logout':
- message = _('Logging out...') + '<meta http-equiv="refresh" content="0;url=' + Settings.CGI_URL + 'manage" />'
- deleteCookie(self, 'weabot_manage')
- deleteSession(staff_account['session_id'])
- template_filename = "message.html"
- elif path_split[2] == 'quotes':
- # Quotes for the post screen
- if "save" in self.formdata.keys():
- try:
- f = open('quotes.conf', 'w')
- f.write(self.formdata["data"])
- f.close()
- message = 'Datos guardados.'
- template_filename = "message.html"
- except:
- message = 'Error al guardar datos.'
- template_filename = "message.html"
- try:
- f = open('quotes.conf', 'r')
- data = cgi.escape(f.read(1048576), True)
- f.close()
- template_filename = "quotes.html"
- template_values = {'data': data}
- except:
- message = 'Error al leer datos.'
- template_filename = 'message.html'
- elif path_split[2] == 'recent_images':
- try:
- if int(self.formdata['images']) > 100:
- images = '100'
- else:
- images = self.formdata['images']
- posts = FetchAll('SELECT * FROM `posts` INNER JOIN `boards` ON boardid = boards.id WHERE CHAR_LENGTH(`thumb`) > 0 ORDER BY `timestamp` DESC LIMIT ' + _mysql.escape_string(images))
- except:
- posts = FetchAll('SELECT * FROM `posts` INNER JOIN `boards` ON boardid = boards.id WHERE CHAR_LENGTH(`thumb`) > 0 ORDER BY `timestamp` DESC LIMIT 10')
- template_filename = "recent_images.html"
- template_values = {'posts': posts}
- elif path_split[2] == 'news':
- if not administrator:
- return
-
- type = 1
- if 'type' in self.formdata:
- type = int(self.formdata['type'])
-
- if type > 2:
- raise UserError, "Tipo no soportado"
-
- # canal del home
- if len(path_split) > 3:
- if path_split[3] == 'add':
- t = datetime.datetime.now()
-
- # Insertar el nuevo post
- title = ''
- message = self.formdata["message"].replace("\n", "<br />")
-
- # Titulo
- if 'title' in self.formdata:
- title = self.formdata["title"]
-
- # Post anonimo
- if 'anonymous' in self.formdata.keys() and self.formdata['anonymous'] == '1':
- to_name = "Staff ★"
- else:
- to_name = "%s ★" % staff_account['username']
- timestamp_formatted = formatDate(t)
- if type > 0:
- timestamp_formatted = re.sub(r"\(.+", "", timestamp_formatted)
- else:
- timestamp_formatted = re.sub(r"\(...\)", " ", timestamp_formatted)
-
- UpdateDb("INSERT INTO `news` (type, staffid, staff_name, title, message, name, timestamp, timestamp_formatted) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%d', '%s')" % (type, staff_account['id'], staff_account['username'], _mysql.escape_string(title), _mysql.escape_string(message), to_name, timestamp(t), timestamp_formatted))
-
- regenerateNews()
- regenerateHome()
- message = _("Added successfully.")
- template_filename = "message.html"
- if path_split[3] == 'delete':
- # Eliminar un post
- id = int(path_split[4])
- UpdateDb("DELETE FROM `news` WHERE id = %d AND type = %d" % (id, type))
- regenerateNews()
- regenerateHome()
- message = _("Deleted successfully.")
- template_filename = "message.html"
- else:
- posts = FetchAll("SELECT * FROM `news` WHERE type = %d ORDER BY `timestamp` DESC" % type)
- template_filename = "news.html"
- template_values = {'action': type, 'posts': posts}
- elif path_split[2] == 'newschannel':
- #if not administrator:
- # return
-
- if len(path_split) > 3:
- if path_split[3] == 'add':
- t = datetime.datetime.now()
- # Delete old posts
- #posts = FetchAll("SELECT `id` FROM `news` WHERE `type` = '1' ORDER BY `timestamp` DESC LIMIT "+str(Settings.MODNEWS_MAX_POSTS)+",30")
- #for post in posts:
- # UpdateDb("DELETE FROM `news` WHERE id = " + post['id'] + " AND `type` = '0'")
-
- # Insert new post
- message = ''
- try:
- # Cut long lines
- message = self.formdata["message"]
- message = clickableURLs(cgi.escape(message).rstrip()[0:8000])
- message = onlyAllowedHTML(message)
- if Settings.USE_MARKDOWN:
- message = markdown(message)
- if not Settings.USE_MARKDOWN:
- message = message.replace("\n", "<br />")
- except:
- pass
-
- # If it's preferred to remain anonymous...
- if 'anonymous' in self.formdata.keys() and self.formdata['anonymous'] == '1':
- to_name = "Staff ★"
- else:
- to_name = "%s ★" % staff_account['username']
- timestamp_formatted = formatDate(t)
-
- UpdateDb("INSERT INTO `news` (type, staffid, staff_name, title, message, name, timestamp, timestamp_formatted) VALUES ('0', '%s', '%s', '%s', '%s', '%s', '%d', '%s')" % (staff_account['id'], staff_account['username'], _mysql.escape_string(self.formdata['title']), _mysql.escape_string(message), to_name, timestamp(t), timestamp_formatted))
-
- message = _("Added successfully.")
- template_filename = "message.html"
- if path_split[3] == 'delete':
- if not administrator:
- # We check that if he's not admin, he shouldn't be able to delete other people's posts
- post = FetchOne("SELECT `staffid` FROM `news` WHERE id = '"+_mysql.escape_string(path_split[4])+"' AND type = '0'")
- if post['staffid'] != staff_account['id']:
- self.error(_('That post is not yours.'))
+ elif path_split[2] == 'fixid':
+ board = setBoard(path_split[3])
+ posts = FetchAll(
+ 'SELECT * FROM `posts` WHERE `boardid` = %s' % board['id'])
+ self.output = "total: %d<br />" % len(posts)
+ for post in posts:
+ new_timestamp_formatted = formatTimestamp(
+ post['timestamp'])
+ tim = 0
+ if board["useid"] != '0':
+ new_timestamp_formatted += ' ID:' + \
+ iphash(post['ip'], '', tim, '1',
+ False, False, False, '0')
+ self.output += "%s - %s <br />" % (
+ post['id'], new_timestamp_formatted)
+ query = "UPDATE `posts` SET timestamp_formatted = '%s' WHERE boardid = '%s' AND id = '%s'" % (
+ new_timestamp_formatted, board['id'], post['id'])
+ UpdateDb(query)
+ return
+ elif path_split[2] == 'fixname':
+ board = setBoard(path_split[3])
+ #posts = FetchAll('SELECT * FROM `posts` WHERE `boardid` = %s' % board['id'])
+ posts = FetchAll(
+ 'SELECT * FROM `posts` WHERE `name` LIKE \'%s\'' % '%%')
+ new_name = board['anonymous']
+ self.output = new_name + "<br />"
+ for post in posts:
+ self.output += "%s<br />" % (post['id'])
+ query = "UPDATE `posts` SET `name` = '%s' WHERE boardid = '%s' AND id = '%s'" % (
+ new_name, board['id'], post['id'])
+ UpdateDb(query)
return
-
- # Delete!
- UpdateDb("DELETE FROM `news` WHERE id = '" + _mysql.escape_string(path_split[4]) + "' AND type = '0'")
- message = _("Deleted successfully.")
- template_filename = "message.html"
+ elif path_split[2] == 'setsub':
+ board = setBoard(path_split[3])
+ thread = FetchOne(
+ 'SELECT * FROM `posts` WHERE `parentid` = 0 AND `boardid` = %s' % board['id'])
+ subject = str(path_split[4])
+ self.output = subject + "->" + thread['id'] + "<br />"
+ query = "UPDATE `posts` SET `subject` = '%s' WHERE boardid = '%s' AND id = '%s'" % (
+ subject, board['id'], thread['id'])
+ UpdateDb(query)
+ return
+ elif path_split[2] == 'fixlength':
+ board = setBoard(path_split[3])
+ threads = FetchAll(
+ 'SELECT * FROM `posts` WHERE parentid = 0 AND `boardid` = %s' % board['id'])
+ for t in threads:
+ length = threadNumReplies(t['id'])
+ UpdateDb('UPDATE posts SET length = %d WHERE boardid = %s AND id = %s' % (
+ length, board['id'], t['id']))
+
+ self.output = 'done'
+ return
+ elif path_split[2] == 'archive':
+ t = time.time()
+ board = setBoard(path_split[3])
+ postid = int(path_split[4])
+ archiveThread(postid)
+ self.output = "todo ok %s" % str(time.time() - t)
+ elif path_split[2] == 'filters':
+ action_taken = False
+ if len(path_split) > 3 and path_split[3] == 'add':
+ if "add" in self.formdata.keys():
+ edit_id = 0
+ if 'edit' in self.formdata.keys():
+ edit_id = int(self.formdata['edit'])
+
+ # We decide what type of filter it is.
+ # 0: Word / 1: Name/Trip
+ filter_type = int(self.formdata["type"])
+ filter_action = int(self.formdata["action"])
+ filter_from = ''
+ filter_tripcode = ''
+
+ # I don't like pickles... oh well.
+ where = ''
+ if 'board_all' not in self.formdata.keys():
+ where = []
+ boards = FetchAll('SELECT `dir` FROM `boards`')
+ for board in boards:
+ keyname = 'board_' + board['dir']
+ if keyname in self.formdata.keys():
+ if self.formdata[keyname] == "1":
+ where.append(board['dir'])
+ if len(where) > 0:
+ where = _mysql.escape_string(
+ pickle.dumps(where))
+ else:
+ self.error(
+ _("You must select what board the filter will affect"))
+ return
+
+ if filter_type == 0:
+ # Word filter
+ if len(self.formdata["word"]) > 0:
+ filter_from = _mysql.escape_string(
+ cgi.escape(self.formdata["word"]))
+ else:
+ self.error(_("You must enter a word."))
+ return
+ elif filter_type == 1:
+ # Name/trip filter
+ can_add = False
+ if len(self.formdata["name"]) > 0:
+ filter_from = _mysql.escape_string(
+ self.formdata["name"])
+ can_add = True
+ if len(self.formdata["trip"]) > 0:
+ filter_tripcode = _mysql.escape_string(
+ self.formdata["trip"])
+ can_add = True
+ if not can_add:
+ self.error(
+ _("You must enter a name and/or a tripcode."))
+ return
+
+ # Action
+ sql_query = ''
+ filter_reason = ''
+ if len(self.formdata["reason"]) > 0:
+ filter_reason = _mysql.escape_string(
+ self.formdata["reason"])
+ if filter_action == 0:
+ # Cancel post
+ sql_query = "INSERT INTO `filters` (`id`, `boards`, `type`, `action`, `from`, `from_trip`, `reason`, `added`, `staff`) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % \
+ (edit_id, where, str(filter_type), str(filter_action), filter_from, filter_tripcode, filter_reason, str(
+ timestamp()), _mysql.escape_string(staff_account['username']))
+ elif filter_action == 1:
+ # Change to
+ if len(self.formdata["changeto"]) > 0:
+ filter_to = _mysql.escape_string(
+ self.formdata["changeto"])
+ sql_query = "INSERT INTO `filters` (`id`, `boards`, `type`, `action`, `from`, `from_trip`, `reason`, `to`, `added`, `staff`) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % \
+ (edit_id, where, str(filter_type), str(filter_action), filter_from, filter_tripcode, filter_reason, filter_to, str(
+ timestamp()), _mysql.escape_string(staff_account['username']))
+ else:
+ self.error(
+ _("You must enter a word to change to."))
+ return
+ elif filter_action == 2:
+ # Ban
+ filter_seconds = '0'
+ if len(self.formdata["seconds"]) > 0:
+ filter_seconds = _mysql.escape_string(
+ self.formdata["seconds"])
+ if "blind" in self.formdata.keys() and self.formdata["blind"] == '1':
+ filter_blind = '1'
+ else:
+ filter_blind = '2'
+
+ sql_query = "INSERT INTO `filters` (`id`, `boards`, `type`, `action`, `from`, `from_trip`, `reason`, `seconds`, `blind`, `added`, `staff`) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % \
+ (edit_id, where, str(filter_type), str(filter_action), filter_from, filter_tripcode, filter_reason,
+ filter_seconds, filter_blind, str(timestamp()), _mysql.escape_string(staff_account['username']))
+ elif filter_action == 3:
+ # Redirect URL
+ if len(self.formdata['redirect_url']) > 0:
+ redirect_url = _mysql.escape_string(
+ self.formdata['redirect_url'])
+ redirect_time = 0
+ try:
+ redirect_time = int(
+ self.formdata['redirect_time'])
+ except:
+ pass
+ else:
+ self.error(
+ _("You must enter a URL to redirect to."))
+ return
+
+ sql_query = "INSERT INTO `filters` (`id`, `boards`, `type`, `action`, `from`, `from_trip`, `reason`, `redirect_url`, `redirect_time`, `added`, `staff`) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % \
+ (edit_id, where, str(filter_type), str(filter_action), filter_from, filter_tripcode, filter_reason, redirect_url, str(
+ redirect_time), str(timestamp()), _mysql.escape_string(staff_account['username']))
+ # DO QUERY!
+ if edit_id > 0:
+ UpdateDb(
+ "DELETE FROM `filters` WHERE `id` = %s" % str(edit_id))
+ UpdateDb(sql_query)
+ message = 'Filter edited.'
+ else:
+ filt = FetchOne("SELECT `id` FROM `filters` WHERE `boards` = '%s' AND `type` = '%s' AND `from` = '%s'" % (
+ where, str(filter_type), filter_from))
+ if not filt:
+ UpdateDb(sql_query)
+ message = 'Filter added.'
+ else:
+ message = 'This filter already exists here:' + ' <a href="' + \
+ Settings.CGI_URL+'manage/filters/add?edit=' + \
+ filt['id']+'">edit</a>'
+ action_taken = True
+ template_filename = "message.html"
+ else:
+ # Create add form
+ edit_id = 0
+ if 'edit' in self.formdata.keys() and int(self.formdata['edit']) > 0:
+ # Load values
+ edit_id = int(self.formdata['edit'])
+ filt = FetchOne(
+ "SELECT * FROM `filters` WHERE `id` = %s LIMIT 1" % str(edit_id))
+ if filt['boards'] == '':
+ where = ''
+ else:
+ where = pickle.loads(filt['boards'])
+ startvalues = {'type': filt['type'],
+ 'trip': filt['from_trip'],
+ 'where': where,
+ 'action': filt['action'],
+ 'changeto': cgi.escape(filt['to'], True),
+ 'reason': filt['reason'],
+ 'seconds': filt['seconds'],
+ 'blind': filt['blind'],
+ 'redirect_url': filt['redirect_url'],
+ 'redirect_time': filt['redirect_time'], }
+ if filt['type'] == '1':
+ startvalues['name'] = filt['from']
+ startvalues['word'] = ''
+ else:
+ startvalues['name'] = ''
+ startvalues['word'] = filt['from']
+ else:
+ startvalues = {'type': '0',
+ 'word': '',
+ 'name': '',
+ 'trip': '',
+ 'where': [],
+ 'action': '0',
+ 'changeto': '',
+ 'reason': _('Forbidden word'),
+ 'seconds': '0',
+ 'blind': '1',
+ 'redirect_url': 'http://',
+ 'redirect_time': '5'}
+
+ if edit_id > 0:
+ submit = "Editar Filtro"
+ else:
+ submit = "Agregar filtro"
+
+ action_taken = True
+ template_filename = "filters.html"
+ template_values = {'mode': 1,
+ 'edit_id': edit_id,
+ 'boards': boardlist(),
+ 'startvalues': startvalues,
+ 'submit': submit}
+ elif len(path_split) > 4 and path_split[3] == 'delete':
+ delid = int(path_split[4])
+ UpdateDb(
+ "DELETE FROM `filters` WHERE id = '%s' LIMIT 1" % str(delid))
+ message = _('Deleted filter %s.') % str(delid)
+ template_filename = "message.html"
+ action_taken = True
+
+ if not action_taken:
+ filters = FetchAll(
+ "SELECT * FROM `filters` ORDER BY `added` DESC")
+ for filter in filters:
+ if filter['boards'] == '':
+ filter['boards'] = _('All boards')
+ else:
+ where = pickle.loads(filter['boards'])
+ if len(where) > 1:
+ filter['boards'] = '/' + \
+ '/, /'.join(where) + '/'
+ else:
+ filter['boards'] = '/' + where[0] + '/'
+ if filter['type'] == '0':
+ filter['type_formatted'] = _(
+ 'Word:') + ' <b>' + cgi.escape(filter['from']) + '</b>'
+ elif filter['type'] == '1':
+ filter['type_formatted'] = _('Name/Tripcode:')+' '
+ if filter['from'] != '':
+ filter['type_formatted'] += '<b class="name">' + \
+ filter['from'] + '</b>'
+ if filter['from_trip'] != '':
+ filter['type_formatted'] += '<span class="trip">' + \
+ filter['from_trip'] + '</span>'
+ else:
+ filter['type_formatted'] = '?'
+ if filter['action'] == '0':
+ filter['action_formatted'] = _('Abort post')
+ elif filter['action'] == '1':
+ filter['action_formatted'] = _(
+ 'Change to:') + ' <b>' + cgi.escape(filter['to']) + '</b>'
+ elif filter['action'] == '2':
+ if filter['blind'] == '1':
+ blind = _('Yes')
+ else:
+ blind = _('No')
+ filter['action_formatted'] = _('Autoban:') + '<br />' + \
+ (_('Length:')+' <i>%s</i><br />'+_('Blind:') +
+ ' <i>%s</i>') % (filter['seconds'], blind)
+ elif filter['action'] == '3':
+ filter['action_formatted'] = (_('Redirect to:')+' %s ('+_('in %s secs')+')') % (
+ filter['redirect_url'], filter['redirect_time'])
+ else:
+ filter['action_formatted'] = '?'
+ filter['added'] = formatTimestamp(filter['added'])
+
+ template_filename = "filters.html"
+ template_values = {'mode': 0, 'filters': filters}
+ elif path_split[2] == 'logs':
+ if staff_account['rights'] != '0' and staff_account['rights'] != '2':
+ return
+
+ logs = FetchAll(
+ 'SELECT * FROM `logs` ORDER BY `timestamp` DESC')
+ for log in logs:
+ log['timestamp_formatted'] = formatTimestamp(
+ log['timestamp'])
+ template_filename = "logs.html"
+ template_values = {'logs': logs}
+ elif path_split[2] == 'logout':
+ message = _('Logging out...') + '<meta http-equiv="refresh" content="0;url=' + \
+ Settings.CGI_URL + 'manage" />'
+ deleteCookie(self, 'weabot_manage')
+ deleteSession(staff_account['session_id'])
+ template_filename = "message.html"
+ elif path_split[2] == 'quotes':
+ # Quotes for the post screen
+ if "save" in self.formdata.keys():
+ try:
+ f = open('quotes.conf', 'w')
+ f.write(self.formdata["data"])
+ f.close()
+ message = 'Datos guardados.'
+ template_filename = "message.html"
+ except:
+ message = 'Error al guardar datos.'
+ template_filename = "message.html"
+ try:
+ f = open('quotes.conf', 'r')
+ data = cgi.escape(f.read(1048576), True)
+ f.close()
+ template_filename = "quotes.html"
+ template_values = {'data': data}
+ except:
+ message = 'Error al leer datos.'
+ template_filename = 'message.html'
+ elif path_split[2] == 'recent_images':
+ try:
+ if int(self.formdata['images']) > 100:
+ images = '100'
+ else:
+ images = self.formdata['images']
+ posts = FetchAll(
+ 'SELECT * FROM `posts` INNER JOIN `boards` ON boardid = boards.id WHERE CHAR_LENGTH(`thumb`) > 0 ORDER BY `timestamp` DESC LIMIT ' + _mysql.escape_string(images))
+ except:
+ posts = FetchAll(
+ 'SELECT * FROM `posts` INNER JOIN `boards` ON boardid = boards.id WHERE CHAR_LENGTH(`thumb`) > 0 ORDER BY `timestamp` DESC LIMIT 10')
+ template_filename = "recent_images.html"
+ template_values = {'posts': posts}
+ elif path_split[2] == 'news':
+ if not administrator:
+ return
+
+ type = 1
+ if 'type' in self.formdata:
+ type = int(self.formdata['type'])
+
+ if type > 2:
+ raise UserError, "Tipo no soportado"
+
+ # canal del home
+ if len(path_split) > 3:
+ if path_split[3] == 'add':
+ t = datetime.datetime.now()
+
+ # Insertar el nuevo post
+ title = ''
+ message = self.formdata["message"].replace(
+ "\n", "<br />")
+
+ # Titulo
+ if 'title' in self.formdata:
+ title = self.formdata["title"]
+
+ # Post anonimo
+ if 'anonymous' in self.formdata.keys() and self.formdata['anonymous'] == '1':
+ to_name = "Staff ★"
+ else:
+ to_name = "%s ★" % staff_account['username']
+ timestamp_formatted = formatDate(t)
+ if type > 0:
+ timestamp_formatted = re.sub(
+ r"\(.+", "", timestamp_formatted)
+ else:
+ timestamp_formatted = re.sub(
+ r"\(...\)", " ", timestamp_formatted)
+
+ UpdateDb("INSERT INTO `news` (type, staffid, staff_name, title, message, name, timestamp, timestamp_formatted) VALUES (%d, '%s', '%s', '%s', '%s', '%s', '%d', '%s')" % (
+ type, staff_account['id'], staff_account['username'], _mysql.escape_string(title), _mysql.escape_string(message), to_name, timestamp(t), timestamp_formatted))
+
+ regenerateNews()
+ regenerateHome()
+ message = _("Added successfully.")
+ template_filename = "message.html"
+ if path_split[3] == 'delete':
+ # Eliminar un post
+ id = int(path_split[4])
+ UpdateDb(
+ "DELETE FROM `news` WHERE id = %d AND type = %d" % (id, type))
+ regenerateNews()
+ regenerateHome()
+ message = _("Deleted successfully.")
+ template_filename = "message.html"
+ else:
+ posts = FetchAll(
+ "SELECT * FROM `news` WHERE type = %d ORDER BY `timestamp` DESC" % type)
+ template_filename = "news.html"
+ template_values = {'action': type, 'posts': posts}
+ elif path_split[2] == 'newschannel':
+ # if not administrator:
+ # return
+
+ if len(path_split) > 3:
+ if path_split[3] == 'add':
+ t = datetime.datetime.now()
+ # Delete old posts
+ #posts = FetchAll("SELECT `id` FROM `news` WHERE `type` = '1' ORDER BY `timestamp` DESC LIMIT "+str(Settings.MODNEWS_MAX_POSTS)+",30")
+ # for post in posts:
+ # UpdateDb("DELETE FROM `news` WHERE id = " + post['id'] + " AND `type` = '0'")
+
+ # Insert new post
+ message = ''
+ try:
+ # Cut long lines
+ message = self.formdata["message"]
+ message = clickableURLs(
+ cgi.escape(message).rstrip()[0:8000])
+ message = onlyAllowedHTML(message)
+ if Settings.USE_MARKDOWN:
+ message = markdown(message)
+ if not Settings.USE_MARKDOWN:
+ message = message.replace("\n", "<br />")
+ except:
+ pass
+
+ # If it's preferred to remain anonymous...
+ if 'anonymous' in self.formdata.keys() and self.formdata['anonymous'] == '1':
+ to_name = "Staff ★"
+ else:
+ to_name = "%s ★" % staff_account['username']
+ timestamp_formatted = formatDate(t)
+
+ UpdateDb("INSERT INTO `news` (type, staffid, staff_name, title, message, name, timestamp, timestamp_formatted) VALUES ('0', '%s', '%s', '%s', '%s', '%s', '%d', '%s')" % (
+ staff_account['id'], staff_account['username'], _mysql.escape_string(self.formdata['title']), _mysql.escape_string(message), to_name, timestamp(t), timestamp_formatted))
+
+ message = _("Added successfully.")
+ template_filename = "message.html"
+ if path_split[3] == 'delete':
+ if not administrator:
+ # We check that if he's not admin, he shouldn't be able to delete other people's posts
+ post = FetchOne("SELECT `staffid` FROM `news` WHERE id = '" +
+ _mysql.escape_string(path_split[4])+"' AND type = '0'")
+ if post['staffid'] != staff_account['id']:
+ self.error(_('That post is not yours.'))
+ return
+
+ # Delete!
+ UpdateDb("DELETE FROM `news` WHERE id = '" +
+ _mysql.escape_string(path_split[4]) + "' AND type = '0'")
+ message = _("Deleted successfully.")
+ template_filename = "message.html"
+ else:
+ # If he's not admin, show only his own posts
+ if administrator:
+ posts = FetchAll(
+ "SELECT * FROM `news` WHERE type = '0' ORDER BY `timestamp` DESC")
+ else:
+ posts = FetchAll("SELECT * FROM `news` WHERE staffid = '" +
+ staff_account['id']+"' AND type = '0' ORDER BY `timestamp` DESC")
+
+ template_filename = "news.html"
+ template_values = {'action': 'newschannel', 'posts': posts}
+ elif path_split[2] == 'reports':
+ if not moderator:
+ return
+
+ message = None
+ import math
+ pagesize = float(Settings.REPORTS_PER_PAGE)
+ totals = FetchOne("SELECT COUNT(id) FROM `reports`")
+ total = int(totals['COUNT(id)'])
+ pages = int(math.ceil(total / pagesize))
+
+ try:
+ currentpage = int(path_split[3])
+ except:
+ currentpage = 0
+
+ if len(path_split) > 4:
+ if path_split[4] == 'ignore':
+ # Delete report
+ UpdateDb("DELETE FROM `reports` WHERE `id` = '" +
+ _mysql.escape_string(path_split[5])+"'")
+ message = _('Report %s ignored.') % path_split[5]
+ if 'ignore' in self.formdata.keys():
+ ignored = 0
+ if 'board' in self.formdata.keys() and self.formdata['board'] != 'all':
+ reports = FetchAll("SELECT `id` FROM `reports` WHERE `board` = '%s' ORDER BY `timestamp` DESC LIMIT %d, %d" % (
+ _mysql.escape_string(self.formdata['board']), currentpage*pagesize, pagesize))
+ else:
+ reports = FetchAll("SELECT `id` FROM `reports` ORDER BY `timestamp` DESC LIMIT %d, %d" % (
+ currentpage*pagesize, pagesize))
+
+ for report in reports:
+ keyname = 'i' + report['id']
+ if keyname in self.formdata.keys():
+ # Ignore here
+ UpdateDb("DELETE FROM `reports` WHERE `id` = '" +
+ _mysql.escape_string(report['id'])+"'")
+ ignored += 1
+
+ message = _('Ignored %s report(s).') % str(ignored)
+
+ # Generate board list
+ boards = FetchAll(
+ 'SELECT `name`, `dir` FROM `boards` ORDER BY `dir`')
+ for board in boards:
+ if 'board' in self.formdata.keys() and self.formdata['board'] == board['dir']:
+ board['checked'] = True
+ else:
+ board['checked'] = False
+
+ # Tabla
+ if 'board' in self.formdata.keys() and self.formdata['board'] != 'all':
+ reports = FetchAll("SELECT id, timestamp, timestamp_formatted, postid, parentid, link, board, INET_NTOA(ip) AS ip, reason, reporterip FROM `reports` WHERE `board` = '%s' ORDER BY `timestamp` DESC LIMIT %d, %d" % (
+ _mysql.escape_string(self.formdata['board']), currentpage*pagesize, pagesize))
+ else:
+ reports = FetchAll("SELECT id, timestamp, timestamp_formatted, postid, parentid, link, board, INET_NTOA(ip) AS ip, reason, reporterip FROM `reports` ORDER BY `timestamp` DESC LIMIT %d, %d" % (
+ currentpage*pagesize, pagesize))
+
+ if 'board' in self.formdata.keys():
+ curboard = self.formdata['board']
+ else:
+ curboard = None
+
+ # for report in reports:
+ # if report['parentid'] == '0':
+ # report['link'] = Settings.BOARDS_URL + report['board'] + '/res/' + report['postid'] + '.html#' + report['postid']
+ # else:
+ # report['link'] = Settings.BOARDS_URL + report['board'] + '/res/' + report['parentid'] + '.html#' + report['postid']
+
+ navigator = ''
+ if currentpage > 0:
+ navigator += '<a href="'+Settings.CGI_URL + \
+ 'manage/reports/'+str(currentpage-1)+'">&lt;</a> '
+ else:
+ navigator += '&lt; '
+
+ for i in range(pages):
+ if i != currentpage:
+ navigator += '<a href="'+Settings.CGI_URL + \
+ 'manage/reports/'+str(i)+'">'+str(i)+'</a> '
+ else:
+ navigator += str(i)+' '
+
+ if currentpage < (pages-1):
+ navigator += '<a href="'+Settings.CGI_URL + \
+ 'manage/reports/'+str(currentpage+1)+'">&gt;</a> '
+ else:
+ navigator += '&gt; '
+
+ template_filename = "reports.html"
+ template_values = {'message': message,
+ 'boards': boards,
+ 'reports': reports,
+ 'currentpage': currentpage,
+ 'curboard': curboard,
+ 'navigator': navigator}
+ # Show by IP
+ elif path_split[2] == 'ipshow':
+ if not moderator:
+ return
+
+ if 'ip' in self.formdata.keys():
+ # If an IP was given...
+ if self.formdata['ip'] != '':
+ formatted_ip = str(inet_aton(self.formdata['ip']))
+ posts = FetchAll(
+ "SELECT posts.*, boards.dir, boards.board_type, boards.subject AS default_subject FROM `posts` JOIN `boards` ON boards.id = posts.boardid WHERE ip = '%s' ORDER BY posts.timestamp DESC" % _mysql.escape_string(formatted_ip))
+ if '.' in self.formdata['ip']:
+ ip = self.formdata['ip']
+ else:
+ ip = inet_ntoa(long(self.formdata['ip']))
+ template_filename = "ipshow.html"
+ template_values = {"mode": 1, "ip": ip, "host": getHost(
+ ip), "country": getCountry(ip), "tor": addressIsTor(ip), "posts": posts}
+ logAction(staff_account['username'],
+ "ipshow on {}".format(ip))
+ else:
+ # Generate form
+ template_filename = "ipshow.html"
+ template_values = {"mode": 0}
+ elif path_split[2] == 'ipdelete':
+ if not moderator:
+ return
+
+ # Delete by IP
+ if 'ip' in self.formdata.keys():
+ # If an IP was given...
+ if self.formdata['ip'] != '':
+ where = []
+ if 'board_all' not in self.formdata.keys():
+ # If he chose boards separately, add them to a list
+ boards = FetchAll(
+ 'SELECT `id`, `dir` FROM `boards`')
+ for board in boards:
+ keyname = 'board_' + board['dir']
+ if keyname in self.formdata.keys():
+ if self.formdata[keyname] == "1":
+ where.append(board)
+ else:
+                        # If all boards were selected, add them all to the list
+ where = FetchAll(
+ 'SELECT `id`, `dir` FROM `boards`')
+
+ # If no board was chosen
+ if len(where) <= 0:
+ self.error(_("Select a board first."))
+ return
+
+ deletedPostsTotal = 0
+ ip = inet_aton(self.formdata['ip'])
+ deletedPosts = 0
+ for theboard in where:
+ board = setBoard(theboard['dir'])
+ isDeletedOP = False
+
+ # delete all starting posts first
+ op_posts = FetchAll(
+ "SELECT `id`, `message` FROM posts WHERE parentid = 0 AND boardid = '" + board['id'] + "' AND ip = " + str(ip))
+ for post in op_posts:
+ deletePost(post['id'], None)
+
+ deletedPosts += 1
+ deletedPostsTotal += 1
+
+ replies = FetchAll(
+ "SELECT `id`, `message`, `parentid` FROM posts WHERE parentid != 0 AND boardid = '" + board['id'] + "' AND ip = " + str(ip))
+ for post in replies:
+ deletePost(post['id'], None, '2')
+
+ deletedPosts += 1
+ deletedPostsTotal += 1
+
+ regenerateHome()
+
+ if deletedPosts > 0:
+ message = '%(posts)s post(s) were deleted from %(board)s.' % {
+ 'posts': str(deletedPosts), 'board': '/' + board['dir'] + '/'}
+ template_filename = "message.html"
+ # logAction(staff_account['username'], '%(posts)s post(s) were deleted from %(board)s. IP: %(ip)s' % \
+ # {'posts': str(deletedPosts),
+ # 'board': '/' + board['dir'] + '/',
+ # 'ip': self.formdata['ip']})
+ else:
+ self.error(_("Please enter an IP first."))
+ return
+
+ message = 'In total %(posts)s from IP %(ip)s were deleted.' % {
+ 'posts': str(deletedPosts), 'ip': self.formdata['ip']}
+ logAction(staff_account['username'], message)
+ template_filename = "message.html"
+ else:
+ # Generate form...
+ template_filename = "ipdelete.html"
+ template_values = {'boards': boardlist()}
+ elif path_split[2] == 'search':
+ if not administrator:
+ return
+ search_logs = FetchAll(
+ 'SELECT `id`,`timestamp`,`keyword`,`ita`,INET_NTOA(`ip`) AS `ip`,`res` FROM `search_log` ORDER BY `timestamp` DESC LIMIT 250')
+ for log in search_logs:
+ #log['ip'] = str(inet_ntoa(log['ip']))
+ log['timestamp_formatted'] = formatTimestamp(
+ log['timestamp'])
+ if log['keyword'].startswith('k '):
+ log['keyword'] = log['keyword'][2:]
+ log['archive'] = True
+ else:
+ log['archive'] = False
+ template_filename = "search.html"
+ template_values = {'search': search_logs}
else:
- # If he's not admin, show only his own posts
- if administrator:
- posts = FetchAll("SELECT * FROM `news` WHERE type = '0' ORDER BY `timestamp` DESC")
- else:
- posts = FetchAll("SELECT * FROM `news` WHERE staffid = '"+staff_account['id']+"' AND type = '0' ORDER BY `timestamp` DESC")
-
- template_filename = "news.html"
- template_values = {'action': 'newschannel', 'posts': posts}
- elif path_split[2] == 'reports':
- if not moderator:
- return
-
- message = None
- import math
- pagesize = float(Settings.REPORTS_PER_PAGE)
- totals = FetchOne("SELECT COUNT(id) FROM `reports`")
- total = int(totals['COUNT(id)'])
- pages = int(math.ceil(total / pagesize))
-
+ # Main page.
+ reports = FetchOne("SELECT COUNT(1) FROM `reports`", 0)[0]
+ posts = FetchAll(
+ "SELECT * FROM `news` WHERE type = '0' ORDER BY `timestamp` DESC")
+
+ template_filename = "manage.html"
+ template_values = {'reports': reports, 'posts': posts}
+
+ if not skiptemplate:
try:
- currentpage = int(path_split[3])
+ if template_filename == 'message.html':
+ template_values = {'message': message}
except:
- currentpage = 0
-
- if len(path_split) > 4:
- if path_split[4] == 'ignore':
- # Delete report
- UpdateDb("DELETE FROM `reports` WHERE `id` = '"+_mysql.escape_string(path_split[5])+"'")
- message = _('Report %s ignored.') % path_split[5]
- if 'ignore' in self.formdata.keys():
- ignored = 0
- if 'board' in self.formdata.keys() and self.formdata['board'] != 'all':
- reports = FetchAll("SELECT `id` FROM `reports` WHERE `board` = '%s' ORDER BY `timestamp` DESC LIMIT %d, %d" % (_mysql.escape_string(self.formdata['board']), currentpage*pagesize, pagesize))
- else:
- reports = FetchAll("SELECT `id` FROM `reports` ORDER BY `timestamp` DESC LIMIT %d, %d" % (currentpage*pagesize, pagesize))
-
- for report in reports:
- keyname = 'i' + report['id']
- if keyname in self.formdata.keys():
- # Ignore here
- UpdateDb("DELETE FROM `reports` WHERE `id` = '"+_mysql.escape_string(report['id'])+"'")
- ignored += 1
-
- message = _('Ignored %s report(s).') % str(ignored)
-
- # Generate board list
- boards = FetchAll('SELECT `name`, `dir` FROM `boards` ORDER BY `dir`')
- for board in boards:
- if 'board' in self.formdata.keys() and self.formdata['board'] == board['dir']:
- board['checked'] = True
- else:
- board['checked'] = False
-
- # Tabla
- if 'board' in self.formdata.keys() and self.formdata['board'] != 'all':
- reports = FetchAll("SELECT id, timestamp, timestamp_formatted, postid, parentid, link, board, INET_NTOA(ip) AS ip, reason, reporterip FROM `reports` WHERE `board` = '%s' ORDER BY `timestamp` DESC LIMIT %d, %d" % (_mysql.escape_string(self.formdata['board']), currentpage*pagesize, pagesize))
- else:
- reports = FetchAll("SELECT id, timestamp, timestamp_formatted, postid, parentid, link, board, INET_NTOA(ip) AS ip, reason, reporterip FROM `reports` ORDER BY `timestamp` DESC LIMIT %d, %d" % (currentpage*pagesize, pagesize))
-
- if 'board' in self.formdata.keys():
- curboard = self.formdata['board']
- else:
- curboard = None
-
- #for report in reports:
- # if report['parentid'] == '0':
- # report['link'] = Settings.BOARDS_URL + report['board'] + '/res/' + report['postid'] + '.html#' + report['postid']
- # else:
- # report['link'] = Settings.BOARDS_URL + report['board'] + '/res/' + report['parentid'] + '.html#' + report['postid']
-
- navigator = ''
- if currentpage > 0:
- navigator += '<a href="'+Settings.CGI_URL+'manage/reports/'+str(currentpage-1)+'">&lt;</a> '
- else:
- navigator += '&lt; '
-
- for i in range(pages):
- if i != currentpage:
- navigator += '<a href="'+Settings.CGI_URL+'manage/reports/'+str(i)+'">'+str(i)+'</a> '
- else:
- navigator += str(i)+' '
-
- if currentpage < (pages-1):
- navigator += '<a href="'+Settings.CGI_URL+'manage/reports/'+str(currentpage+1)+'">&gt;</a> '
- else:
- navigator += '&gt; '
-
- template_filename = "reports.html"
- template_values = {'message': message,
- 'boards': boards,
- 'reports': reports,
- 'currentpage': currentpage,
- 'curboard': curboard,
- 'navigator': navigator}
- # Show by IP
- elif path_split[2] == 'ipshow':
- if not moderator:
- return
-
- if 'ip' in self.formdata.keys():
- # If an IP was given...
- if self.formdata['ip'] != '':
- formatted_ip = str(inet_aton(self.formdata['ip']))
- posts = FetchAll("SELECT posts.*, boards.dir, boards.board_type, boards.subject AS default_subject FROM `posts` JOIN `boards` ON boards.id = posts.boardid WHERE ip = '%s' ORDER BY posts.timestamp DESC" % _mysql.escape_string(formatted_ip))
- if '.' in self.formdata['ip']:
- ip = self.formdata['ip']
- else:
- ip = inet_ntoa(long(self.formdata['ip']))
- template_filename = "ipshow.html"
- template_values = {"mode": 1, "ip": ip, "host": getHost(ip), "country": getCountry(ip), "tor": addressIsTor(ip), "posts": posts}
- logAction(staff_account['username'], "ipshow on {}".format(ip))
- else:
- # Generate form
- template_filename = "ipshow.html"
- template_values = {"mode": 0}
- elif path_split[2] == 'ipdelete':
- if not moderator:
- return
-
- # Delete by IP
- if 'ip' in self.formdata.keys():
- # If an IP was given...
- if self.formdata['ip'] != '':
- where = []
- if 'board_all' not in self.formdata.keys():
- # If he chose boards separately, add them to a list
- boards = FetchAll('SELECT `id`, `dir` FROM `boards`')
- for board in boards:
- keyname = 'board_' + board['dir']
- if keyname in self.formdata.keys():
- if self.formdata[keyname] == "1":
- where.append(board)
- else:
-						# If all boards were selected, add them all to the list
- where = FetchAll('SELECT `id`, `dir` FROM `boards`')
-
- # If no board was chosen
- if len(where) <= 0:
- self.error(_("Select a board first."))
- return
-
- deletedPostsTotal = 0
- ip = inet_aton(self.formdata['ip'])
- deletedPosts = 0
- for theboard in where:
- board = setBoard(theboard['dir'])
- isDeletedOP = False
-
- # delete all starting posts first
- op_posts = FetchAll("SELECT `id`, `message` FROM posts WHERE parentid = 0 AND boardid = '" + board['id'] + "' AND ip = " + str(ip))
- for post in op_posts:
- deletePost(post['id'], None)
-
- deletedPosts += 1
- deletedPostsTotal += 1
-
- replies = FetchAll("SELECT `id`, `message`, `parentid` FROM posts WHERE parentid != 0 AND boardid = '" + board['id'] + "' AND ip = " + str(ip))
- for post in replies:
- deletePost(post['id'], None, '2')
-
- deletedPosts += 1
- deletedPostsTotal += 1
-
- regenerateHome()
-
- if deletedPosts > 0:
- message = '%(posts)s post(s) were deleted from %(board)s.' % {'posts': str(deletedPosts), 'board': '/' + board['dir'] + '/'}
- template_filename = "message.html"
- #logAction(staff_account['username'], '%(posts)s post(s) were deleted from %(board)s. IP: %(ip)s' % \
- # {'posts': str(deletedPosts),
- # 'board': '/' + board['dir'] + '/',
- # 'ip': self.formdata['ip']})
- else:
- self.error(_("Please enter an IP first."))
- return
-
- message = 'In total %(posts)s from IP %(ip)s were deleted.' % {'posts': str(deletedPosts), 'ip': self.formdata['ip']}
- logAction(staff_account['username'], message)
- template_filename = "message.html"
- else:
- # Generate form...
- template_filename = "ipdelete.html"
- template_values = {'boards': boardlist()}
- elif path_split[2] == 'search':
- if not administrator:
- return
- search_logs = FetchAll('SELECT `id`,`timestamp`,`keyword`,`ita`,INET_NTOA(`ip`) AS `ip`,`res` FROM `search_log` ORDER BY `timestamp` DESC LIMIT 250')
- for log in search_logs:
- #log['ip'] = str(inet_ntoa(log['ip']))
- log['timestamp_formatted'] = formatTimestamp(log['timestamp'])
- if log['keyword'].startswith('k '):
- log['keyword'] = log['keyword'][2:]
- log['archive'] = True
- else:
- log['archive'] = False
- template_filename = "search.html"
- template_values = {'search': search_logs}
- else:
- # Main page.
- reports = FetchOne("SELECT COUNT(1) FROM `reports`", 0)[0]
- posts = FetchAll("SELECT * FROM `news` WHERE type = '0' ORDER BY `timestamp` DESC")
-
- template_filename = "manage.html"
- template_values = {'reports': reports, 'posts': posts}
-
- if not skiptemplate:
- try:
- if template_filename == 'message.html':
- template_values = {'message': message}
- except:
- template_filename = 'message.html'
- template_values = {'message': '???'}
-
- template_values.update({
- 'title': 'Manage',
- 'validated': validated,
- 'page': page,
- })
-
- if validated:
- template_values.update({
- 'username': staff_account['username'],
- 'site_title': Settings.SITE_TITLE,
- 'rights': staff_account['rights'],
- 'administrator': administrator,
- 'added': formatTimestamp(staff_account['added']),
- })
-
- self.output += renderTemplate("manage/" + template_filename, template_values)
-
+ template_filename = 'message.html'
+ template_values = {'message': '???'}
+
+ template_values.update({
+ 'title': 'Manage',
+ 'validated': validated,
+ 'page': page,
+ })
+
+ if validated:
+ template_values.update({
+ 'username': staff_account['username'],
+ 'site_title': Settings.SITE_TITLE,
+ 'rights': staff_account['rights'],
+ 'administrator': administrator,
+ 'added': formatTimestamp(staff_account['added']),
+ })
+
+ self.output += renderTemplate("manage/" +
+ template_filename, template_values)
+
+
def switchBoard(new_type):
- board = Settings._.BOARD
-
- if new_type == board['board_type']:
- return
-
- kako_dir = os.path.join(Settings.ROOT_DIR, board['dir'], 'kako')
- res_dir = os.path.join(Settings.ROOT_DIR, board['dir'], 'res')
-
- if new_type == '0':
- # Switching to Imageboard
- # Delete kako if empty
- if os.path.exists(kako_dir) and not os.listdir(kako_dir):
- os.rmdir(kako_dir)
- elif new_type == '1':
- # Switching to Textboard
- # Make kako dir
- if not os.path.exists(kako_dir):
- os.mkdir(kako_dir)
-
- # Clean res dir
- cleanDir(res_dir, ext="html")
+ board = Settings._.BOARD
+
+ if new_type == board['board_type']:
+ return
+
+ kako_dir = os.path.join(Settings.ROOT_DIR, board['dir'], 'kako')
+ res_dir = os.path.join(Settings.ROOT_DIR, board['dir'], 'res')
+
+ if new_type == '0':
+ # Switching to Imageboard
+ # Delete kako if empty
+ if os.path.exists(kako_dir) and not os.listdir(kako_dir):
+ os.rmdir(kako_dir)
+ elif new_type == '1':
+ # Switching to Textboard
+ # Make kako dir
+ if not os.path.exists(kako_dir):
+ os.mkdir(kako_dir)
+
+ # Clean res dir
+ cleanDir(res_dir, ext="html")
+
def newSession(staff_id):
- import uuid
- session_uuid = uuid.uuid4().hex
-
- param_session_id = _mysql.escape_string(session_uuid)
- param_expires = timestamp() + Settings.SESSION_TIME
- param_staff_id = int(staff_id)
-
- InsertDb("INSERT INTO `session` (`session_id`, `expires`, `staff_id`) VALUES (UNHEX('%s'), %d, %d)" %
- (param_session_id, param_expires, param_staff_id))
-
- return session_uuid
+ import uuid
+ session_uuid = uuid.uuid4().hex
+
+ param_session_id = _mysql.escape_string(session_uuid)
+ param_expires = timestamp() + Settings.SESSION_TIME
+ param_staff_id = int(staff_id)
+
+ InsertDb("INSERT INTO `session` (`session_id`, `expires`, `staff_id`) VALUES (UNHEX('%s'), %d, %d)" %
+ (param_session_id, param_expires, param_staff_id))
+
+ return session_uuid
+
def validateSession(session_id):
- cleanSessions()
-
- param_session_id = _mysql.escape_string(session_id)
- param_now = timestamp()
- session = FetchOne(
- "SELECT HEX(session_id) as session_id, id, username, rights, added FROM `session` "
- "INNER JOIN `staff` ON `session`.`staff_id` = `staff`.`id` "
- "WHERE `session_id` = UNHEX('%s')" %
- (param_session_id))
-
- if session:
- return session
-
- return None
+ cleanSessions()
+
+ param_session_id = _mysql.escape_string(session_id)
+ param_now = timestamp()
+ session = FetchOne(
+ "SELECT HEX(session_id) as session_id, id, username, rights, added FROM `session` "
+ "INNER JOIN `staff` ON `session`.`staff_id` = `staff`.`id` "
+ "WHERE `session_id` = UNHEX('%s')" %
+ (param_session_id))
+
+ if session:
+ return session
+
+ return None
+
def renewSession(session_id):
- param_session_id = _mysql.escape_string(session_id)
- param_expires = timestamp() + Settings.SESSION_TIME
-
- UpdateDb("UPDATE `session` SET expires = %d WHERE session_id = UNHEX('%s')" %
- (param_expires, param_session_id))
+ param_session_id = _mysql.escape_string(session_id)
+ param_expires = timestamp() + Settings.SESSION_TIME
+
+ UpdateDb("UPDATE `session` SET expires = %d WHERE session_id = UNHEX('%s')" %
+ (param_expires, param_session_id))
+
def deleteSession(session_id):
- param_session_id = _mysql.escape_string(session_id)
-
- UpdateDb("DELETE FROM `session` WHERE session_id = UNHEX('%s')" % param_session_id)
-
+ param_session_id = _mysql.escape_string(session_id)
+
+ UpdateDb("DELETE FROM `session` WHERE session_id = UNHEX('%s')" %
+ param_session_id)
+
+
def cleanSessions():
- param_now = timestamp()
-
- UpdateDb("DELETE FROM `session` WHERE expires <= %d" % param_now)
+ param_now = timestamp()
+
+ UpdateDb("DELETE FROM `session` WHERE expires <= %d" % param_now)
+
def logAction(staff, action):
- InsertDb("INSERT INTO `logs` (`timestamp`, `staff`, `action`) VALUES (" + str(timestamp()) + ", '" + _mysql.escape_string(staff) + "\', \'" + _mysql.escape_string(action) + "\')")
+ InsertDb("INSERT INTO `logs` (`timestamp`, `staff`, `action`) VALUES (" + str(timestamp()) +
+ ", '" + _mysql.escape_string(staff) + "\', \'" + _mysql.escape_string(action) + "\')")
+
def genPasswdHash(string):
- import argon2
- ph = argon2.PasswordHasher()
-
- return ph.hash(string)
+ import argon2
+ ph = argon2.PasswordHasher()
+
+ return ph.hash(string)
+
def verifyPasswd(username, passwd):
- import argon2
- ph = argon2.PasswordHasher()
-
- param_username = _mysql.escape_string(username)
- staff_account = FetchOne("SELECT * FROM staff WHERE username = '%s'" % param_username)
- if not staff_account:
- return None
-
- try:
- ph.verify(staff_account['password'], passwd)
- except argon2.exceptions.VerifyMismatchError:
- return None
- except argon2.exceptions.InvalidHash:
- raise UserError, "Hash obsoleto o inválido. Por favor contacte al administrador."
-
- if ph.check_needs_rehash(staff_account['password']):
-		param_new_hash = ph.hash(passwd)
- UpdateDb("UPDATE staff SET password = '%s' WHERE id = %s" %
- (param_new_hash, staff_account['id']))
-
- return staff_account
+ import argon2
+ ph = argon2.PasswordHasher()
+
+ param_username = _mysql.escape_string(username)
+ staff_account = FetchOne(
+ "SELECT * FROM staff WHERE username = '%s'" % param_username)
+ if not staff_account:
+ return None
+
+ try:
+ ph.verify(staff_account['password'], passwd)
+ except argon2.exceptions.VerifyMismatchError:
+ return None
+ except argon2.exceptions.InvalidHash:
+ raise UserError, "Hash obsoleto o inválido. Por favor contacte al administrador."
+
+ if ph.check_needs_rehash(staff_account['password']):
+        param_new_hash = ph.hash(passwd)  # re-hash the verified plaintext, not the stored hash
+ UpdateDb("UPDATE staff SET password = '%s' WHERE id = %s" %
+ (param_new_hash, staff_account['id']))
+
+ return staff_account
+
def boardlist():
- boards = FetchAll('SELECT * FROM `boards` ORDER BY `board_type`, `dir`')
- return boards
-
+ boards = FetchAll('SELECT * FROM `boards` ORDER BY `board_type`, `dir`')
+ return boards
+
+
def filetypelist():
- filetypes = FetchAll('SELECT * FROM `filetypes` ORDER BY `ext` ASC')
- return filetypes
+ filetypes = FetchAll('SELECT * FROM `filetypes` ORDER BY `ext` ASC')
+ return filetypes
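
For reference, genPasswdHash()/verifyPasswd() above build on the usual argon2-cffi verify-then-rehash flow: verify the submitted password against the stored hash, and if the hasher's parameters have changed since the hash was created, re-hash the plaintext that was just verified and persist the replacement. A minimal standalone sketch of that flow follows (it assumes the argon2-cffi package; check_login() and its return convention are illustrative names, not weabot helpers):

import argon2

ph = argon2.PasswordHasher()

def check_login(stored_hash, password):
    # Returns (ok, new_hash): ok is True when the password matches,
    # new_hash is a replacement hash to persist, or None.
    try:
        ph.verify(stored_hash, password)
    except argon2.exceptions.VerifyMismatchError:
        return False, None
    new_hash = None
    if ph.check_needs_rehash(stored_hash):
        # Hash parameters are outdated: re-hash the plaintext that was
        # just verified, never the stored hash itself.
        new_hash = ph.hash(password)
    return True, new_hash

Note that check_needs_rehash() only inspects the parameters encoded in the stored hash, so the value handed to ph.hash() in the rehash branch must be the user-supplied password.
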
diff --git a/cgi/markdown.py b/cgi/markdown.py
index 3ebfaab..846c192 100644
--- a/cgi/markdown.py
+++ b/cgi/markdown.py
@@ -33,6 +33,15 @@ number of extras (e.g., code syntax coloring, footnotes) as described on
<http://code.google.com/p/python-markdown2/wiki/Extras>.
"""
+from urllib import quote
+import codecs
+from random import random, randint
+import optparse
+import logging
+import re
+from pprint import pprint
+import sys
+import os
cmdln_desc = """A fast and complete Python implementation of Markdown, a
text-to-HTML conversion tool for web writers.
@@ -59,33 +68,25 @@ Supported extras (see -x|--extras option below):
# not yet sure if there implications with this. Compare 'pydoc sre'
# and 'perldoc perlre'.
-__version_info__ = (1, 0, 1, 17) # first three nums match Markdown.pl
+__version_info__ = (1, 0, 1, 17) # first three nums match Markdown.pl
__version__ = '1.0.1.17'
__author__ = "Trent Mick"
-import os
-import sys
-from pprint import pprint
-import re
-import logging
try:
from hashlib import md5
except ImportError:
from md5 import md5
-import optparse
-from random import random, randint
-import codecs
-from urllib import quote
-
-#---- Python version compat
+# ---- Python version compat
-if sys.version_info[:2] < (2,4):
+if sys.version_info[:2] < (2, 4):
from sets import Set as set
+
def reversed(sequence):
for i in sequence[::-1]:
yield i
+
def _unicode_decode(s, encoding, errors='xmlcharrefreplace'):
return unicode(s, encoding, errors)
else:
@@ -107,26 +108,29 @@ except ImportError:
SECRET_SALT = str(randint(0, 1000000))
else:
SECRET_SALT = str(uuid.uuid4())
+
+
def _hash_ascii(s):
- #return md5(s).hexdigest() # Markdown.pl effectively does this.
+ # return md5(s).hexdigest() # Markdown.pl effectively does this.
return 'md5-' + md5(SECRET_SALT + s).hexdigest()
+
+
def _hash_text(s):
return 'md5-' + md5(SECRET_SALT + s.encode("utf-8")).hexdigest()
+
# Table of hash values for escaped characters:
g_escape_table = dict([(ch, _hash_ascii(ch))
for ch in '\\`*_{}[]()>#+-.!'])
-
#---- exceptions
class MarkdownError(Exception):
pass
-
-#---- public api
+# ---- public api
def markdown_path(path, encoding="utf-8",
html4tags=False, tab_width=DEFAULT_TAB_WIDTH,
@@ -140,6 +144,7 @@ def markdown_path(path, encoding="utf-8",
link_patterns=link_patterns,
use_file_vars=use_file_vars).convert(text)
+
def markdown(text, html4tags=False, tab_width=DEFAULT_TAB_WIDTH,
safe_mode=None, extras=None, link_patterns=None,
use_file_vars=False):
@@ -148,6 +153,7 @@ def markdown(text, html4tags=False, tab_width=DEFAULT_TAB_WIDTH,
link_patterns=link_patterns,
use_file_vars=use_file_vars).convert(text)
+
class Markdown(object):
# The dict of "extras" to enable in processing -- a mapping of
# extra name to argument for the extra. Most extras do not have an
@@ -178,7 +184,7 @@ class Markdown(object):
self.tab_width = tab_width
# For compatibility with earlier markdown2.py and with
- # markdown.py's safe_mode being a boolean,
+ # markdown.py's safe_mode being a boolean,
# safe_mode == True -> "replace"
if safe_mode is True:
self.safe_mode = "replace"
@@ -212,7 +218,7 @@ class Markdown(object):
self.footnotes = {}
self.footnote_ids = []
if "header-ids" in self.extras:
- self._count_from_header_id = {} # no `defaultdict` in Python 2.4
+ self._count_from_header_id = {} # no `defaultdict` in Python 2.4
def convert(self, text):
"""Convert the given text."""
@@ -228,7 +234,7 @@ class Markdown(object):
self.reset()
if not isinstance(text, unicode):
- #TODO: perhaps shouldn't presume UTF-8 for string input?
+ # TODO: perhaps shouldn't presume UTF-8 for string input?
text = unicode(text, 'utf-8')
if self.use_file_vars:
@@ -287,13 +293,14 @@ class Markdown(object):
text = self._unhash_html_spans(text)
#text += "\n"
-
+
rv = UnicodeWithAttrs(text)
if "toc" in self.extras:
rv._toc = self._toc
return rv
- _emacs_oneliner_vars_pat = re.compile(r"-\*-\s*([^\r\n]*?)\s*-\*-", re.UNICODE)
+ _emacs_oneliner_vars_pat = re.compile(
+ r"-\*-\s*([^\r\n]*?)\s*-\*-", re.UNICODE)
# This regular expression is intended to match blocks like this:
# PREFIX Local Variables: SUFFIX
# PREFIX mode: Tcl SUFFIX
@@ -317,7 +324,7 @@ class Markdown(object):
http://www.gnu.org/software/emacs/manual/html_node/emacs/Specifying-File-Variables.html#Specifying-File-Variables
"""
emacs_vars = {}
- SIZE = pow(2, 13) # 8kB
+ SIZE = pow(2, 13) # 8kB
# Search near the start for a '-*-'-style one-liner of variables.
head = text[:SIZE]
@@ -374,9 +381,12 @@ class Markdown(object):
# Parse out one emacs var per line.
continued_for = None
- for line in lines[:-1]: # no var on the last line ("PREFIX End:")
- if prefix: line = line[len(prefix):] # strip prefix
- if suffix: line = line[:-len(suffix)] # strip suffix
+ # no var on the last line ("PREFIX End:")
+ for line in lines[:-1]:
+ if prefix:
+ line = line[len(prefix):] # strip prefix
+ if suffix:
+ line = line[:-len(suffix)] # strip suffix
line = line.strip()
if continued_for:
variable = continued_for
@@ -405,7 +415,7 @@ class Markdown(object):
# Unquote values.
for var, val in emacs_vars.items():
if len(val) > 1 and (val.startswith('"') and val.endswith('"')
- or val.startswith('"') and val.endswith('"')):
+ or val.startswith('"') and val.endswith('"')):
emacs_vars[var] = val[1:-1]
return emacs_vars
@@ -413,9 +423,11 @@ class Markdown(object):
# Cribbed from a post by Bart Lateur:
# <http://www.nntp.perl.org/group/perl.macperl.anyperl/154>
_detab_re = re.compile(r'(.*?)\t', re.M)
+
def _detab_sub(self, match):
g1 = match.group(1)
return g1 + (' ' * (self.tab_width - len(g1) % self.tab_width))
+
def _detab(self, text):
r"""Remove (leading?) tabs from a file.
@@ -447,7 +459,7 @@ class Markdown(object):
(?=\n+|\Z) # followed by a newline or end of document
)
""" % _block_tags_a,
- re.X | re.M)
+ re.X | re.M)
_block_tags_b = 'p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|noscript|form|fieldset|iframe|math'
_liberal_tag_block_re = re.compile(r"""
@@ -461,7 +473,7 @@ class Markdown(object):
(?=\n+|\Z) # followed by a newline or end of document
)
""" % _block_tags_b,
- re.X | re.M)
+ re.X | re.M)
def _hash_html_block_sub(self, match, raw=False):
html = match.group(1)
@@ -506,7 +518,7 @@ class Markdown(object):
text = self._liberal_tag_block_re.sub(hash_html_block_sub, text)
# Special case just for <hr />. It was easier to make a special
- # case than to make the other regex more complicated.
+ # case than to make the other regex more complicated.
if "<hr" in text:
_hr_tag_re = _hr_tag_re_from_tab_width(self.tab_width)
text = _hr_tag_re.sub(hash_html_block_sub, text)
@@ -564,12 +576,13 @@ class Markdown(object):
html = self._sanitize_html(html)
key = _hash_text(html)
self.html_blocks[key] = html
- text = text[:start_idx] + "\n\n" + key + "\n\n" + text[end_idx:]
+ text = text[:start_idx] + "\n\n" + \
+ key + "\n\n" + text[end_idx:]
if "xml" in self.extras:
# Treat XML processing instructions and namespaced one-liner
# tags as if they were block HTML tags. E.g., if standalone
- # (i.e. are their own paragraph), the following do not get
+ # (i.e. are their own paragraph), the following do not get
# wrapped in a <p> tag:
# <?foo bar?>
#
@@ -583,7 +596,7 @@ class Markdown(object):
# Strips link definitions from text, stores the URLs and titles in
# hash references.
less_than_tab = self.tab_width - 1
-
+
# Link defs are in the form:
# [id]: url "optional title"
_link_def_re = re.compile(r"""
@@ -634,7 +647,7 @@ class Markdown(object):
- The 'note-id' can be pretty much anything, though typically it
is the number of the footnote.
- The first paragraph may start on the next line, like so:
-
+
[^note-id]:
Text of the note.
"""
@@ -656,7 +669,6 @@ class Markdown(object):
re.X | re.M)
return footnote_def_re.sub(self._extract_footnote_def_sub, text)
-
_hr_res = [
re.compile(r"^[ ]{0,2}([ ]?\*[ ]?){3,}[ \t]*$", re.M),
re.compile(r"^[ ]{0,2}([ ]?\-[ ]?){3,}[ \t]*$", re.M),
@@ -671,7 +683,7 @@ class Markdown(object):
# Do Horizontal Rules:
#hr = "\n<hr"+self.empty_element_suffix+"\n"
- #for hr_re in self._hr_res:
+ # for hr_re in self._hr_res:
# text = hr_re.sub(hr, text)
text = self._do_lists(text)
@@ -697,11 +709,11 @@ class Markdown(object):
lines = match.group(0).splitlines(0)
_dedentlines(lines)
indent = ' ' * self.tab_width
- s = ('\n' # separate from possible cuddled paragraph
+ s = ('\n' # separate from possible cuddled paragraph
+ indent + ('\n'+indent).join(lines)
+ '\n\n')
return s
-
+
def _prepare_pyshell_blocks(self, text):
"""Ensure that Python interactive shell sessions are put in
code blocks -- even if not properly indented.
@@ -721,14 +733,14 @@ class Markdown(object):
def _run_span_gamut(self, text):
# These are all the transformations that occur *within* block-level
# tags like paragraphs, headers, and list items.
-
- #text = self._do_code_spans(text) - El AA !
-
+
+ # text = self._do_code_spans(text) - El AA !
+
text = self._escape_special_chars(text)
-
+
# Process anchor and image tags.
text = self._do_links(text)
-
+
# Make links out of things like `<http://example.com/>`
# Must come after _do_links(), because you can use < and >
# delimiters in inline links like [this](<url>).
@@ -736,14 +748,14 @@ class Markdown(object):
if "link-patterns" in self.extras:
text = self._do_link_patterns(text)
-
+
text = self._encode_amps_and_angles(text)
-
+
text = self._do_italics_and_bold(text)
-
+
# Do hard breaks:
text = re.sub(r"\n", "<br%s" % self.empty_element_suffix, text)
-
+
return text
# "Sorta" because auto-links are identified as "tag" tokens.
@@ -763,7 +775,7 @@ class Markdown(object):
<\?.*?\?> # processing instruction
)
""", re.X)
-
+
def _escape_special_chars(self, text):
# Python markdown note: the HTML tokenization here differs from
# that in Markdown.pl, hence the behaviour for subtle cases can
@@ -875,7 +887,7 @@ class Markdown(object):
anchor_allowed_pos = 0
curr_pos = 0
- while True: # Handle the next link.
+ while True: # Handle the next link.
# The next '[' is the start of:
# - an inline anchor: [text](url "title")
# - a reference anchor: [text][id]
@@ -903,7 +915,7 @@ class Markdown(object):
# matching brackets in img alt text -- we'll differ in that
# regard.
bracket_depth = 0
- for p in range(start_idx+1, min(start_idx+MAX_LINK_TEXT_SENTINEL,
+ for p in range(start_idx+1, min(start_idx+MAX_LINK_TEXT_SENTINEL,
text_length)):
ch = text[p]
if ch == ']':
@@ -939,12 +951,12 @@ class Markdown(object):
return text
# Inline anchor or img?
- if text[p] == '(': # attempt at perf improvement
+ if text[p] == '(': # attempt at perf improvement
match = self._tail_of_inline_link_re.match(text, p)
if match:
# Handle an inline anchor or img.
#is_img = start_idx > 0 and text[start_idx-1] == "!"
- #if is_img:
+ # if is_img:
# start_idx -= 1
is_img = False
@@ -988,7 +1000,7 @@ class Markdown(object):
if match:
# Handle a reference-style anchor or img.
#is_img = start_idx > 0 and text[start_idx-1] == "!"
- #if is_img:
+ # if is_img:
# start_idx -= 1
is_img = False
@@ -1014,7 +1026,8 @@ class Markdown(object):
link_text.replace('"', '&quot;'),
title_str, self.empty_element_suffix)
curr_pos = start_idx + len(result)
- text = text[:start_idx] + result + text[match.end():]
+ text = text[:start_idx] + \
+ result + text[match.end():]
elif start_idx >= anchor_allowed_pos:
result = '<a href="%s"%s>%s</a>' \
% (url, title_str, link_text)
@@ -1024,7 +1037,8 @@ class Markdown(object):
# anchor_allowed_pos on.
curr_pos = start_idx + len(result_head)
anchor_allowed_pos = start_idx + len(result)
- text = text[:start_idx] + result + text[match.end():]
+ text = text[:start_idx] + \
+ result + text[match.end():]
else:
# Anchor not allowed here.
curr_pos = start_idx + 1
@@ -1036,12 +1050,12 @@ class Markdown(object):
# Otherwise, it isn't markup.
curr_pos = start_idx + 1
- return text
+ return text
def header_id_from_text(self, text, prefix):
"""Generate a header id attribute value from the given header
HTML content.
-
+
This is only called if the "header-ids" extra is enabled.
Subclasses may override this for different header ids.
"""
@@ -1056,12 +1070,14 @@ class Markdown(object):
return header_id
_toc = None
+
def _toc_add_entry(self, level, id, name):
if self._toc is None:
self._toc = []
self._toc.append((level, id, name))
_setext_h_re = re.compile(r'^(.+)[ \t]*\n(=+|-+)[ \t]*\n+', re.M)
+
def _setext_h_sub(self, match):
n = {"=": 1, "-": 2}[match.group(2)[0]]
demote_headers = self.extras.get("demote-headers")
@@ -1070,7 +1086,7 @@ class Markdown(object):
header_id_attr = ""
if "header-ids" in self.extras:
header_id = self.header_id_from_text(match.group(1),
- prefix=self.extras["header-ids"])
+ prefix=self.extras["header-ids"])
header_id_attr = ' id="%s"' % header_id
html = self._run_span_gamut(match.group(1))
if "toc" in self.extras:
@@ -1086,6 +1102,7 @@ class Markdown(object):
\#* # optional closing #'s (not counted)
\n+
''', re.X | re.M)
+
def _atx_h_sub(self, match):
n = len(match.group(1))
demote_headers = self.extras.get("demote-headers")
@@ -1094,7 +1111,7 @@ class Markdown(object):
header_id_attr = ""
if "header-ids" in self.extras:
header_id = self.header_id_from_text(match.group(2),
- prefix=self.extras["header-ids"])
+ prefix=self.extras["header-ids"])
header_id_attr = ' id="%s"' % header_id
html = self._run_span_gamut(match.group(2))
if "toc" in self.extras:
@@ -1105,7 +1122,7 @@ class Markdown(object):
# Setext-style headers:
# Header 1
# ========
- #
+ #
# Header 2
# --------
text = self._setext_h_re.sub(self._setext_h_sub, text)
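# The _setext_h_re pattern above recognises underlined ("setext") headers;
# "=" maps to <h1> and "-" to <h2> in _setext_h_sub().  A standalone check
# of the same pattern:
import re
setext_h_re = re.compile(r'^(.+)[ \t]*\n(=+|-+)[ \t]*\n+', re.M)
m = setext_h_re.match("Title\n=====\n\n")
assert m and m.group(1) == "Title" and m.group(2)[0] == "="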
@@ -1120,8 +1137,7 @@ class Markdown(object):
return text
-
- _marker_ul_chars = '*+-'
+ _marker_ul_chars = '*+-'
_marker_any = r'(?:[%s]|\d+\.)' % _marker_ul_chars
_marker_ul = '(?:[%s])' % _marker_ul_chars
_marker_ol = r'(?:\d+\.)'
@@ -1161,7 +1177,7 @@ class Markdown(object):
)
)
''' % (less_than_tab, marker_pat, marker_pat)
-
+
# We use a different prefix before nested lists than top-level lists.
# See extended comment in _process_list_items().
#
@@ -1189,7 +1205,7 @@ class Markdown(object):
text = list_re.sub(self._list_sub, text)
return text
-
+
_list_item_re = re.compile(r'''
(\n)? # leading line = \1
(^[ \t]*) # leading whitespace = \2
@@ -1201,6 +1217,7 @@ class Markdown(object):
re.M | re.X | re.S)
_last_li_endswith_two_eols = False
+
def _list_item_sub(self, match):
item = match.group(4)
leading_line = match.group(1)
@@ -1219,7 +1236,7 @@ class Markdown(object):
def _process_list_items(self, list_str):
# Process the contents of a single ordered or unordered list,
# splitting it into individual list items.
-
+
# The $g_list_level global keeps track of when we're inside a list.
# Each time we enter a list, we increment it; when we leave a list,
# we decrement. If it's zero, we're not in a list anymore.
@@ -1268,7 +1285,7 @@ class Markdown(object):
"""
yield 0, "<code>"
for tup in inner:
- yield tup
+ yield tup
yield 0, "</code>"
def wrap(self, source, outfile):
@@ -1333,7 +1350,6 @@ class Markdown(object):
return code_block_re.sub(self._code_block_sub, text)
-
# Rules for a code span:
# - backslash escapes are not interpreted in a code span
# - to include one or or a run of more backticks the delimiters must
@@ -1359,26 +1375,26 @@ class Markdown(object):
def _do_code_spans(self, text):
# * Backtick quotes are used for <code></code> spans.
- #
+ #
# * You can use multiple backticks as the delimiters if you want to
# include literal backticks in the code span. So, this input:
- #
+ #
# Just type ``foo `bar` baz`` at the prompt.
- #
+ #
# Will translate to:
- #
+ #
# <p>Just type <code>foo `bar` baz</code> at the prompt.</p>
- #
+ #
# There's no arbitrary limit to the number of backticks you
# can use as delimters. If you need three consecutive backticks
# in your code, use four for delimiters, etc.
#
# * You can use spaces to get literal backticks at the edges:
- #
+ #
# ... type `` `bar` `` ...
- #
+ #
# Turns to:
- #
+ #
# ... type <code>`bar`</code> ...
return self._code_span_re.sub(self._code_span_sub, text)
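# The comment block above documents Markdown code spans.  _code_span_re
# itself is not shown in this hunk, so the pattern below is only a minimal
# stand-in in the same spirit (multi-backtick delimiters, trimmed edges):
import re
code_span = re.compile(r"(`+)(.+?)\1", re.S)
def _code_span_demo_sub(m):
    return "<code>%s</code>" % m.group(2).strip()
assert code_span.sub(_code_span_demo_sub, "type ``foo `bar` baz`` now") == \
    "type <code>foo `bar` baz</code> now"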
@@ -1409,22 +1425,24 @@ class Markdown(object):
_strong_re = re.compile(r"(\*\*|__)(?=\S)(.+?[*_]*)(?<=\S)\1", re.S)
_em_re = re.compile(r"(\*|_)(?=\S)(.+?)(?<=\S)\1", re.S)
- #_spoiler_re = re.compile(r"###(?=\S)(.+?[*_]*)(?<=\S)###", re.S)
-
- _code_friendly_strong_re = re.compile(r"\*\*(?=\S)(.+?[*_]*)(?<=\S)\*\*", re.S)
+ # _spoiler_re = re.compile(r"###(?=\S)(.+?[*_]*)(?<=\S)###", re.S)
+
+ _code_friendly_strong_re = re.compile(
+ r"\*\*(?=\S)(.+?[*_]*)(?<=\S)\*\*", re.S)
_code_friendly_em_re = re.compile(r"\*(?=\S)(.+?)(?<=\S)\*", re.S)
+
def _do_italics_and_bold(self, text):
# <strong> must go first:
if "code-friendly" in self.extras:
- text = self._code_friendly_strong_re.sub(r"<strong>\1</strong>", text)
+ text = self._code_friendly_strong_re.sub(
+ r"<strong>\1</strong>", text)
text = self._code_friendly_em_re.sub(r"<em>\1</em>", text)
else:
text = self._strong_re.sub(r"<strong>\2</strong>", text)
text = self._em_re.sub(r"<em>\2</em>", text)
-
+
#text = self._spoiler_re.sub("<del>\\1</del>", text)
return text
-
_block_quote_re = re.compile(r'''
( # Wrap whole match in \1
@@ -1435,19 +1453,20 @@ class Markdown(object):
)+
)
''', re.M | re.X)
- _bq_one_level_re = re.compile('^[ \t]*>[ \t]?', re.M);
+ _bq_one_level_re = re.compile('^[ \t]*>[ \t]?', re.M)
_html_pre_block_re = re.compile(r'(\s*<pre>.+?</pre>)', re.S)
+
def _dedent_two_spaces_sub(self, match):
return re.sub(r'(?m)^ ', '', match.group(1))
def _block_quote_sub(self, match):
bq = match.group(1)
- #bq = self._bq_one_level_re.sub('', bq) # trim one level of quoting
+ # bq = self._bq_one_level_re.sub('', bq) # trim one level of quoting
bq = self._ws_only_line_re.sub('', bq) # trim whitespace-only lines
bq = bq.strip('\n')
bq = self._run_span_gamut(bq)
- #bq = self._run_block_gamut(bq) # recurse
+ # bq = self._run_block_gamut(bq) # recurse
bq = re.sub('(?m)^', ' ', bq)
# These leading spaces screw with <pre> content, so we need to fix that:
@@ -1482,16 +1501,18 @@ class Markdown(object):
# consider numeric bullets (e.g. "1." and "2.") to be
# equal.
if (li and len(li.group(2)) <= 3 and li.group("next_marker")
- and li.group("marker")[-1] == li.group("next_marker")[-1]):
+ and li.group("marker")[-1] == li.group("next_marker")[-1]):
start = li.start()
- cuddled_list = self._do_lists(graf[start:]).rstrip("\n")
- assert cuddled_list.startswith("<ul>") or cuddled_list.startswith("<ol>")
+ cuddled_list = self._do_lists(
+ graf[start:]).rstrip("\n")
+ assert cuddled_list.startswith(
+ "<ul>") or cuddled_list.startswith("<ol>")
graf = graf[:start]
-
+
# Wrap <p> tags.
graf = self._run_span_gamut(graf)
grafs.append("<p>" + graf.lstrip(" \t") + "</p>")
-
+
if cuddled_list:
grafs.append(cuddled_list)
@@ -1510,9 +1531,9 @@ class Markdown(object):
footer.append('<li id="fn-%s">' % id)
footer.append(self._run_block_gamut(self.footnotes[id]))
backlink = ('<a href="#fnref-%s" '
- 'class="footnoteBackLink" '
- 'title="Jump back to footnote %d in the text.">'
- '&#8617;</a>' % (id, i+1))
+ 'class="footnoteBackLink" '
+ 'title="Jump back to footnote %d in the text.">'
+ '&#8617;</a>' % (id, i+1))
if footer[-1].endswith("</p>"):
footer[-1] = footer[-1][:-len("</p>")] \
+ '&nbsp;' + backlink + "</p>"
@@ -1535,7 +1556,7 @@ class Markdown(object):
# Smart processing for ampersands and angle brackets that need
# to be encoded.
text = self._ampersand_re.sub('&amp;', text)
-
+
# Encode naked <'s
text = self._naked_lt_re.sub('&lt;', text)
@@ -1551,6 +1572,7 @@ class Markdown(object):
return text
_auto_link_re = re.compile(r'<((https?|ftp):[^\'">\s]+)>', re.I)
+
def _auto_link_sub(self, match):
g1 = match.group(1)
return '<a href="%s">%s</a>' % (g1, g1)
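# _auto_link_re / _auto_link_sub above turn bare <http://...> URLs into
# anchors; the same pattern and replacement, exercised standalone:
import re
auto_link_re = re.compile(r'<((https?|ftp):[^\'">\s]+)>', re.I)
def _auto_link_demo_sub(m):
    g1 = m.group(1)
    return '<a href="%s">%s</a>' % (g1, g1)
assert auto_link_re.sub(_auto_link_demo_sub, "see <http://example.com/>") == \
    'see <a href="http://example.com/">http://example.com/</a>'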
@@ -1565,6 +1587,7 @@ class Markdown(object):
)
>
""", re.I | re.X | re.U)
+
def _auto_email_link_sub(self, match):
return self._encode_email_address(
self._unescape_special_chars(match.group(1)))
@@ -1593,7 +1616,7 @@ class Markdown(object):
addr = '<a href="%s">%s</a>' \
% (''.join(chars), ''.join(chars[7:]))
return addr
-
+
def _do_link_patterns(self, text):
"""Caveat emptor: there isn't much guarding against link
patterns being formed inside other standard Markdown links, e.g.
@@ -1614,7 +1637,7 @@ class Markdown(object):
for (start, end), href in reversed(replacements):
escaped_href = (
href.replace('"', '&quot;') # b/c of attr quote
- # To avoid markdown <em> and <strong>:
+ # To avoid markdown <em> and <strong>:
.replace('*', g_escape_table['*'])
.replace('_', g_escape_table['_']))
link = '<a href="%s">%s</a>' % (escaped_href, text[start:end])
@@ -1624,7 +1647,7 @@ class Markdown(object):
for hash, link in link_from_hash.items():
text = text.replace(hash, link)
return text
-
+
def _unescape_special_chars(self, text):
# Swap back in all the special characters we've hidden.
for ch, hash in g_escape_table.items():
@@ -1651,7 +1674,7 @@ class MarkdownWithExtras(Markdown):
extras = ["footnotes", "code-color"]
-#---- internal support functions
+# ---- internal support functions
class UnicodeWithAttrs(unicode):
"""A subclass of unicode used for the return value of conversion to
@@ -1662,12 +1685,12 @@ class UnicodeWithAttrs(unicode):
@property
def toc_html(self):
"""Return the HTML for the current TOC.
-
+
This expects the `_toc` attribute to have been set on this instance.
"""
if self._toc is None:
return None
-
+
def indent():
return ' ' * (len(h_stack) - 1)
lines = []
@@ -1696,11 +1719,13 @@ class UnicodeWithAttrs(unicode):
_slugify_strip_re = re.compile(r'[^\w\s-]')
_slugify_hyphenate_re = re.compile(r'[-\s]+')
+
+
def _slugify(value):
"""
Normalizes string, converts to lowercase, removes non-alpha characters,
and converts spaces to hyphens.
-
+
From Django's "django/template/defaultfilters.py".
"""
import unicodedata
@@ -1709,8 +1734,11 @@ def _slugify(value):
return _slugify_hyphenate_re.sub('-', value)
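# Hypothetical use of _slugify() above (assuming the Markdown module shown
# here is importable as `markdown`, i.e. cgi/ is on sys.path); it lowercases,
# strips non-word characters and collapses whitespace/hyphen runs into "-":
from markdown import _slugify
print(_slugify(u"Hello,  World!"))    # expected output: hello-world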
# From http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52549
+
+
def _curry(*args, **kwargs):
function, args = args[0], args[1:]
+
def result(*rest, **kwrest):
combined = kwargs.copy()
combined.update(kwrest)
@@ -1718,6 +1746,8 @@ def _curry(*args, **kwargs):
return result
# Recipe: regex_from_encoded_pattern (1.0)
+
+
def _regex_from_encoded_pattern(s):
"""'foo' -> re.compile(re.escape('foo'))
'/foo/' -> re.compile('foo')
@@ -1743,30 +1773,33 @@ def _regex_from_encoded_pattern(s):
"(must be one of '%s')"
% (char, s, ''.join(flag_from_char.keys())))
return re.compile(s[1:idx], flags)
- else: # not an encoded regex
+ else: # not an encoded regex
return re.compile(re.escape(s))
# Recipe: dedent (0.1.2)
+
+
def _dedentlines(lines, tabsize=8, skip_first_line=False):
"""_dedentlines(lines, tabsize=8, skip_first_line=False) -> dedented lines
-
+
"lines" is a list of lines to dedent.
"tabsize" is the tab width to use for indent width calculations.
"skip_first_line" is a boolean indicating if the first line should
be skipped for calculating the indent width and for dedenting.
This is sometimes useful for docstrings and similar.
-
+
Same as dedent() except operates on a sequence of lines. Note: the
lines list is modified **in-place**.
"""
DEBUG = False
- if DEBUG:
+ if DEBUG:
print "dedent: dedent(..., tabsize=%d, skip_first_line=%r)"\
% (tabsize, skip_first_line)
indents = []
margin = None
for i, line in enumerate(lines):
- if i == 0 and skip_first_line: continue
+ if i == 0 and skip_first_line:
+ continue
indent = 0
for ch in line:
if ch == ' ':
@@ -1774,21 +1807,24 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False):
elif ch == '\t':
indent += tabsize - (indent % tabsize)
elif ch in '\r\n':
- continue # skip all-whitespace lines
+ continue # skip all-whitespace lines
else:
break
else:
- continue # skip all-whitespace lines
- if DEBUG: print "dedent: indent=%d: %r" % (indent, line)
+ continue # skip all-whitespace lines
+ if DEBUG:
+ print "dedent: indent=%d: %r" % (indent, line)
if margin is None:
margin = indent
else:
margin = min(margin, indent)
- if DEBUG: print "dedent: margin=%r" % margin
+ if DEBUG:
+ print "dedent: margin=%r" % margin
if margin is not None and margin > 0:
for i, line in enumerate(lines):
- if i == 0 and skip_first_line: continue
+ if i == 0 and skip_first_line:
+ continue
removed = 0
for j, ch in enumerate(line):
if ch == ' ':
@@ -1796,7 +1832,8 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False):
elif ch == '\t':
removed += tabsize - (removed % tabsize)
elif ch in '\r\n':
- if DEBUG: print "dedent: %r: EOL -> strip up to EOL" % line
+ if DEBUG:
+ print "dedent: %r: EOL -> strip up to EOL" % line
lines[i] = lines[i][j:]
break
else:
@@ -1817,6 +1854,7 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False):
lines[i] = lines[i][removed:]
return lines
+
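# _dedentlines() above strips the common leading indentation in place;
# a quick use of it (hypothetical import path, assuming cgi/ on sys.path):
from markdown import _dedentlines
lines = ["    def f():\n", "        return 1\n"]
_dedentlines(lines)
# lines is now ["def f():\n", "    return 1\n"] -- the 4-column margin is gone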
def _dedent(text, tabsize=8, skip_first_line=False):
"""_dedent(text, tabsize=8, skip_first_line=False) -> dedented text
@@ -1825,7 +1863,7 @@ def _dedent(text, tabsize=8, skip_first_line=False):
"skip_first_line" is a boolean indicating if the first line should
be skipped for calculating the indent width and for dedenting.
This is sometimes useful for docstrings and similar.
-
+
textwrap.dedent(s), but don't expand tabs to spaces
"""
lines = text.splitlines(1)
@@ -1834,28 +1872,31 @@ def _dedent(text, tabsize=8, skip_first_line=False):
class _memoized(object):
- """Decorator that caches a function's return value each time it is called.
- If called later with the same arguments, the cached value is returned, and
- not re-evaluated.
-
- http://wiki.python.org/moin/PythonDecoratorLibrary
- """
- def __init__(self, func):
- self.func = func
- self.cache = {}
- def __call__(self, *args):
- try:
- return self.cache[args]
- except KeyError:
- self.cache[args] = value = self.func(*args)
- return value
- except TypeError:
- # uncachable -- for instance, passing a list as an argument.
- # Better to not cache than to blow up entirely.
- return self.func(*args)
- def __repr__(self):
- """Return the function's docstring."""
- return self.func.__doc__
+ """Decorator that caches a function's return value each time it is called.
+ If called later with the same arguments, the cached value is returned, and
+ not re-evaluated.
+
+ http://wiki.python.org/moin/PythonDecoratorLibrary
+ """
+
+ def __init__(self, func):
+ self.func = func
+ self.cache = {}
+
+ def __call__(self, *args):
+ try:
+ return self.cache[args]
+ except KeyError:
+ self.cache[args] = value = self.func(*args)
+ return value
+ except TypeError:
+ # uncachable -- for instance, passing a list as an argument.
+ # Better to not cache than to blow up entirely.
+ return self.func(*args)
+
+ def __repr__(self):
+ """Return the function's docstring."""
+ return self.func.__doc__
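# The _memoized decorator above is applied below to the regex builders so
# each tab width compiles its pattern only once.  Sketch of the same idea
# (hypothetical import path, assuming cgi/ on sys.path; _tab_indent_re is
# made up for the example):
import re
from markdown import _memoized
def _tab_indent_re(tab_width):
    return re.compile(r"^[ ]{%d}" % tab_width, re.M)
_tab_indent_re = _memoized(_tab_indent_re)
assert _tab_indent_re(4) is _tab_indent_re(4)   # second call is a cache hit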
def _xml_oneliner_re_from_tab_width(tab_width):
@@ -1877,10 +1918,13 @@ def _xml_oneliner_re_from_tab_width(tab_width):
(?=\n{2,}|\Z) # followed by a blank line or end of document
)
""" % (tab_width - 1), re.X)
+
+
_xml_oneliner_re_from_tab_width = _memoized(_xml_oneliner_re_from_tab_width)
+
def _hr_tag_re_from_tab_width(tab_width):
- return re.compile(r"""
+ return re.compile(r"""
(?:
(?<=\n\n) # Starting after a blank line
| # or
@@ -1896,6 +1940,8 @@ def _hr_tag_re_from_tab_width(tab_width):
(?=\n{2,}|\Z) # followed by a blank line or end of document
)
""" % (tab_width - 1), re.X)
+
+
_hr_tag_re_from_tab_width = _memoized(_hr_tag_re_from_tab_width)
@@ -1913,18 +1959,20 @@ def _xml_encode_email_char_at_random(ch):
return '&#%s;' % ord(ch)
-
#---- mainline
class _NoReflowFormatter(optparse.IndentedHelpFormatter):
"""An optparse formatter that does NOT reflow the description."""
+
def format_description(self, description):
return description or ""
+
def _test():
import doctest
doctest.testmod()
+
def main(argv=None):
if argv is None:
argv = sys.argv
@@ -1934,14 +1982,14 @@ def main(argv=None):
usage = "usage: %prog [PATHS...]"
version = "%prog "+__version__
parser = optparse.OptionParser(prog="markdown2", usage=usage,
- version=version, description=cmdln_desc,
- formatter=_NoReflowFormatter())
+ version=version, description=cmdln_desc,
+ formatter=_NoReflowFormatter())
parser.add_option("-v", "--verbose", dest="log_level",
action="store_const", const=logging.DEBUG,
help="more verbose output")
parser.add_option("--encoding",
help="specify encoding of text content")
- parser.add_option("--html4tags", action="store_true", default=False,
+ parser.add_option("--html4tags", action="store_true", default=False,
help="use HTML 4 style for empty element tags")
parser.add_option("-s", "--safe", metavar="MODE", dest="safe_mode",
help="sanitize literal HTML: 'escape' escapes "
@@ -1990,8 +2038,10 @@ def main(argv=None):
f = open(opts.link_patterns_file)
try:
for i, line in enumerate(f.readlines()):
- if not line.strip(): continue
- if line.lstrip().startswith("#"): continue
+ if not line.strip():
+ continue
+ if line.lstrip().startswith("#"):
+ continue
try:
pat, href = line.rstrip().rsplit(None, 1)
except ValueError:
@@ -2025,7 +2075,7 @@ def main(argv=None):
html.encode(sys.stdout.encoding or "utf-8", 'xmlcharrefreplace'))
if extras and "toc" in extras:
log.debug("toc_html: " +
- html.toc_html.encode(sys.stdout.encoding or "utf-8", 'xmlcharrefreplace'))
+ html.toc_html.encode(sys.stdout.encoding or "utf-8", 'xmlcharrefreplace'))
if opts.compare:
test_dir = join(dirname(dirname(abspath(__file__))), "test")
if exists(join(test_dir, "test_markdown2.py")):
@@ -2040,5 +2090,4 @@ def main(argv=None):
if __name__ == "__main__":
- sys.exit( main(sys.argv) )
-
+ sys.exit(main(sys.argv))
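# Most of the reformatting in this file wraps long expressions with a
# trailing backslash (as in several hunks above).  PEP 8 generally prefers
# implicit continuation inside parentheses; both forms below are equivalent
# (all values are placeholders):
text, key, start_idx, end_idx = "abcdef", "HASH", 2, 4
a = text[:start_idx] + "\n\n" + \
    key + "\n\n" + text[end_idx:]
b = (text[:start_idx] + "\n\n"
     + key + "\n\n" + text[end_idx:])
assert a == b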
diff --git a/cgi/oekaki.py b/cgi/oekaki.py
index e20316c..e063c61 100644
--- a/cgi/oekaki.py
+++ b/cgi/oekaki.py
@@ -11,206 +11,219 @@ from formatting import *
from template import *
from post import *
+
def oekaki(self, path_split):
- """
- Este script hace todo lo que tiene que hacer con los
- archivos de Oekaki.
- """
- page = ''
- skiptemplate = False
-
- if len(path_split) > 2:
- # Inicia el applet. Lo envia luego a este mismo script, a "Finish".
- if path_split[2] == 'paint':
- # Veamos que applet usar
- applet = self.formdata['oek_applet'].split('|')
-
- applet_name = applet[0]
-
- if len(applet) > 1 and applet[1] == 'y':
- applet_str = 'pro'
- else:
- applet_str = ''
-
- if len(applet) > 2 and applet[2] == 'y':
- use_selfy = True
- else:
- use_selfy = False
-
- # Obtenemos el board
- board = setBoard(self.formdata['board'])
-
- if board['allow_oekaki'] != '1':
- raise UserError, 'Esta sección no soporta oekaki.'
-
- # Veamos a quien le estamos respondiendo
- try:
- parentid = int(self.formdata['parent'])
- except:
- parentid = 0
-
- # Vemos si el usuario quiere una animacion
- if 'oek_animation' in self.formdata.keys():
- animation = True
- animation_str = 'animation'
- else:
- animation = False
- animation_str = ''
-
- # Nos aseguramos que la entrada es numerica
- try:
- width = int(self.formdata['oek_x'])
- height = int(self.formdata['oek_y'])
- except:
- raise UserError, 'Valores de tamaño inválidos (%s)' % repr(self.formdata)
-
- params = {
- 'dir_resource': Settings.BOARDS_URL + 'oek_temp/',
- 'tt.zip': 'tt_def.zip',
- 'res.zip': 'res.zip',
- 'MAYSCRIPT': 'true',
- 'scriptable': 'true',
- 'tools': applet_str,
- 'layer_count': '5',
- 'undo': '90',
- 'undo_in_mg': '15',
- 'url_save': Settings.BOARDS_URL + 'oek_temp/save.py?applet=shi'+applet_str,
- 'poo': 'false',
- 'send_advance': 'true',
- 'send_language': 'utf8',
- 'send_header': '',
- 'send_header_image_type': 'false',
- 'thumbnail_type': animation_str,
- 'image_jpeg': 'false',
- 'image_size': '92',
- 'compress_level': '4'
- }
-
- if 'oek_edit' in self.formdata.keys():
- # Si hay que editar, cargar la imagen correspondiente en el canvas
- pid = int(self.formdata['oek_edit'])
- post = FetchOne('SELECT id, file, image_width, image_height FROM posts WHERE id = %d AND boardid = %s' % (pid, board['id']))
- editfile = Settings.BOARDS_URL + board['dir'] + '/src/' + post['file']
-
- params['image_canvas'] = edit
- params['image_width'] = file['image_width']
- params['image_height'] = file['image_height']
- width = int(file['image_width'])
- height = int(file['image_height'])
- else:
- editfile = None
- params['image_width'] = str(width)
- params['image_height'] = str(height)
-
- if 'canvas' in self.formdata.keys():
- editfile = self.formdata['canvas']
-
- # Darle las dimensiones al exit script
- params['url_exit'] = Settings.CGI_URL + 'oekaki/finish/' + board['dir'] + '/' + str(parentid)
-
- page += renderTemplate("paint.html", {'applet': applet_name, 'edit': editfile, 'replythread': parentid, 'width': width, 'height': height, 'params': params, 'selfy': use_selfy})
- elif path_split[2] == 'save':
- # path splits:
- # 3: Board
- # 4: Data format
- board = setBoard(path_split[3])
-
- ip = inet_aton(self.environ["REMOTE_ADDR"])
- fname = os.path.join(Settings.IMAGES_DIR, board['dir'], "temp", str(ip) + ".png")
-
- if path_split[4] == 'b64':
- page = write_from_base64(fname, self.formdata['image'])
- elif path_split[4] == 'paintbbs':
- page = write_from_shi(fname, self.environ["wsgi.input"])
- elif path_split[2] == 'finish':
- # path splits:
- # 3: Board
- # 4: Parentid
- if len(path_split) >= 5:
- # Al terminar de dibujar, llegamos aqui. Damos la opcion de postearlo.
- board = setBoard(path_split[3])
- try:
- parentid = int(path_split[4])
- except:
- parentid = None
-
- if board['allow_oekaki'] != '1':
- raise UserError, 'Esta sección no soporta oekaki.'
-
- ts = int(time.time())
- ip = inet_aton(self.environ["REMOTE_ADDR"])
- fname = os.path.join(Settings.IMAGES_DIR, board['dir'], "temp", str(ip) + ".png")
- oek = 'no'
-
- if 'filebase' in self.formdata:
- write_from_base64(fname, self.formdata['filebase'])
-
- if os.path.isfile(fname):
- oek = ip
-
- try:
- timetaken = timestamp() - int(path_split[5][:-2])
- except:
- timetaken = 0
-
- page += renderTemplate("board.html", {"threads": None, "oek_finish": oek, "replythread": parentid, "ts": ts})
- elif path_split[2] == 'animation':
- try:
- board = setBoard(path_split[3])
- file = int(path_split[4])
- except:
- raise UserError, 'Board o archivo de animación inválido.'
-
- params = {
- 'pch_file': Settings.BOARDS_URL + board['dir'] + '/src/' + str(file) + '.pch',
- 'run': 'true',
- 'buffer_progress': 'false',
- 'buffer_canvas': 'true',
- 'speed': '2',
- 'res.zip': Settings.BOARDS_URL + 'oek_temp/res/' +'res.zip',
- 'tt.zip': Settings.BOARDS_URL + 'oek_temp/res/' + 'tt.zip',
- 'tt_size': '31'
- }
- page += '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">' + \
- '<html xmlns="http://www.w3.org/1999/xhtml">\n<head><style type="text/css">html, body{margin: 0; padding: 0;height:100%;} .full{width:100%;height:100%;}</style>\n<title>Bienvenido a Internet | Oekaki</title>\n</head>\n' + \
- '<body bgcolor="#CFCFFF" text="#800000" link="#003399" vlink="#808080" alink="#11FF11">\n' + \
- '<table cellpadding="0" cellspacing="0" class="full"><tr><td class="full">\n'
- page += '<applet name="pch" code="pch2.PCHViewer.class" archive="' + Settings.BOARDS_URL + 'oek_temp/PCHViewer123.jar" width="100%" height="100%">'
- for key in params.keys():
- page += '<param name="' + key + '" value="' + cleanString(params[key]) + '" />' + "\n"
- page += '<div align="center">Java must be installed and enabled to use this applet. Please refer to our Java setup tutorial for more information.</div>'
- page += '</applet>\n</td></tr></table>\n</body>\n</html>'
-
- if not skiptemplate:
- self.output = page
+ """
+ Este script hace todo lo que tiene que hacer con los
+ archivos de Oekaki.
+ """
+ page = ''
+ skiptemplate = False
+
+ if len(path_split) > 2:
+ # Inicia el applet. Lo envia luego a este mismo script, a "Finish".
+ if path_split[2] == 'paint':
+ # Veamos que applet usar
+ applet = self.formdata['oek_applet'].split('|')
+
+ applet_name = applet[0]
+
+ if len(applet) > 1 and applet[1] == 'y':
+ applet_str = 'pro'
+ else:
+ applet_str = ''
+
+ if len(applet) > 2 and applet[2] == 'y':
+ use_selfy = True
+ else:
+ use_selfy = False
+
+ # Obtenemos el board
+ board = setBoard(self.formdata['board'])
+
+ if board['allow_oekaki'] != '1':
+ raise UserError, 'Esta sección no soporta oekaki.'
+
+ # Veamos a quien le estamos respondiendo
+ try:
+ parentid = int(self.formdata['parent'])
+ except:
+ parentid = 0
+
+ # Vemos si el usuario quiere una animacion
+ if 'oek_animation' in self.formdata.keys():
+ animation = True
+ animation_str = 'animation'
+ else:
+ animation = False
+ animation_str = ''
+
+ # Nos aseguramos que la entrada es numerica
+ try:
+ width = int(self.formdata['oek_x'])
+ height = int(self.formdata['oek_y'])
+ except:
+ raise UserError, 'Valores de tamaño inválidos (%s)' % repr(
+ self.formdata)
+
+ params = {
+ 'dir_resource': Settings.BOARDS_URL + 'oek_temp/',
+ 'tt.zip': 'tt_def.zip',
+ 'res.zip': 'res.zip',
+ 'MAYSCRIPT': 'true',
+ 'scriptable': 'true',
+ 'tools': applet_str,
+ 'layer_count': '5',
+ 'undo': '90',
+ 'undo_in_mg': '15',
+ 'url_save': Settings.BOARDS_URL + 'oek_temp/save.py?applet=shi'+applet_str,
+ 'poo': 'false',
+ 'send_advance': 'true',
+ 'send_language': 'utf8',
+ 'send_header': '',
+ 'send_header_image_type': 'false',
+ 'thumbnail_type': animation_str,
+ 'image_jpeg': 'false',
+ 'image_size': '92',
+ 'compress_level': '4'
+ }
+
+ if 'oek_edit' in self.formdata.keys():
+ # Si hay que editar, cargar la imagen correspondiente en el canvas
+ pid = int(self.formdata['oek_edit'])
+ post = FetchOne(
+ 'SELECT id, file, image_width, image_height FROM posts WHERE id = %d AND boardid = %s' % (pid, board['id']))
+ editfile = Settings.BOARDS_URL + \
+ board['dir'] + '/src/' + post['file']
+
+                params['image_canvas'] = editfile
+                params['image_width'] = post['image_width']
+                params['image_height'] = post['image_height']
+                width = int(post['image_width'])
+                height = int(post['image_height'])
+ else:
+ editfile = None
+ params['image_width'] = str(width)
+ params['image_height'] = str(height)
+
+ if 'canvas' in self.formdata.keys():
+ editfile = self.formdata['canvas']
+
+ # Darle las dimensiones al exit script
+ params['url_exit'] = Settings.CGI_URL + \
+ 'oekaki/finish/' + board['dir'] + '/' + str(parentid)
+
+ page += renderTemplate("paint.html", {'applet': applet_name, 'edit': editfile, 'replythread': parentid,
+ 'width': width, 'height': height, 'params': params, 'selfy': use_selfy})
+ elif path_split[2] == 'save':
+ # path splits:
+ # 3: Board
+ # 4: Data format
+ board = setBoard(path_split[3])
+
+ ip = inet_aton(self.environ["REMOTE_ADDR"])
+ fname = os.path.join(Settings.IMAGES_DIR,
+ board['dir'], "temp", str(ip) + ".png")
+
+ if path_split[4] == 'b64':
+ page = write_from_base64(fname, self.formdata['image'])
+ elif path_split[4] == 'paintbbs':
+ page = write_from_shi(fname, self.environ["wsgi.input"])
+ elif path_split[2] == 'finish':
+ # path splits:
+ # 3: Board
+ # 4: Parentid
+ if len(path_split) >= 5:
+ # Al terminar de dibujar, llegamos aqui. Damos la opcion de postearlo.
+ board = setBoard(path_split[3])
+ try:
+ parentid = int(path_split[4])
+ except:
+ parentid = None
+
+ if board['allow_oekaki'] != '1':
+ raise UserError, 'Esta sección no soporta oekaki.'
+
+ ts = int(time.time())
+ ip = inet_aton(self.environ["REMOTE_ADDR"])
+ fname = os.path.join(Settings.IMAGES_DIR,
+ board['dir'], "temp", str(ip) + ".png")
+ oek = 'no'
+
+ if 'filebase' in self.formdata:
+ write_from_base64(fname, self.formdata['filebase'])
+
+ if os.path.isfile(fname):
+ oek = ip
+
+ try:
+ timetaken = timestamp() - int(path_split[5][:-2])
+ except:
+ timetaken = 0
+
+ page += renderTemplate("board.html", {
+ "threads": None, "oek_finish": oek, "replythread": parentid, "ts": ts})
+ elif path_split[2] == 'animation':
+ try:
+ board = setBoard(path_split[3])
+ file = int(path_split[4])
+ except:
+ raise UserError, 'Board o archivo de animación inválido.'
+
+ params = {
+ 'pch_file': Settings.BOARDS_URL + board['dir'] + '/src/' + str(file) + '.pch',
+ 'run': 'true',
+ 'buffer_progress': 'false',
+ 'buffer_canvas': 'true',
+ 'speed': '2',
+ 'res.zip': Settings.BOARDS_URL + 'oek_temp/res/' + 'res.zip',
+ 'tt.zip': Settings.BOARDS_URL + 'oek_temp/res/' + 'tt.zip',
+ 'tt_size': '31'
+ }
+ page += '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">' + \
+ '<html xmlns="http://www.w3.org/1999/xhtml">\n<head><style type="text/css">html, body{margin: 0; padding: 0;height:100%;} .full{width:100%;height:100%;}</style>\n<title>Bienvenido a Internet | Oekaki</title>\n</head>\n' + \
+ '<body bgcolor="#CFCFFF" text="#800000" link="#003399" vlink="#808080" alink="#11FF11">\n' + \
+ '<table cellpadding="0" cellspacing="0" class="full"><tr><td class="full">\n'
+ page += '<applet name="pch" code="pch2.PCHViewer.class" archive="' + \
+ Settings.BOARDS_URL + 'oek_temp/PCHViewer123.jar" width="100%" height="100%">'
+ for key in params.keys():
+ page += '<param name="' + key + '" value="' + \
+ cleanString(params[key]) + '" />' + "\n"
+ page += '<div align="center">Java must be installed and enabled to use this applet. Please refer to our Java setup tutorial for more information.</div>'
+ page += '</applet>\n</td></tr></table>\n</body>\n</html>'
+
+ if not skiptemplate:
+ self.output = page
+
def write_from_base64(fname, data):
- # Skip header
- if data.startswith("data:image/png;base64,"):
- data = data[22:]
- data = data.replace(' ', '+')
- data = data.decode('base64')
- with open(fname, 'wb') as f:
- f.write(data)
- return "OK"
+ # Skip header
+ if data.startswith("data:image/png;base64,"):
+ data = data[22:]
+ data = data.replace(' ', '+')
+ data = data.decode('base64')
+ with open(fname, 'wb') as f:
+ f.write(data)
+ return "OK"
+
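# write_from_base64() above uses str.decode('base64'), which only exists on
# Python 2.  The base64 module gives the same result and also works on
# Python 3 (the sample payload is a stand-in, not a real PNG):
import base64
data = "data:image/png;base64,aGVsbG8="
prefix = "data:image/png;base64,"
if data.startswith(prefix):
    data = data[len(prefix):]
raw = base64.b64decode(data.replace(' ', '+'))
assert raw == b"hello"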
def write_from_shi(fname, fp):
- # Check data type
- type = fp.read(1)
- if type != 'P':
- return "UNSUPPORTED"
+ # Check data type
+ type = fp.read(1)
+ if type != 'P':
+ return "UNSUPPORTED"
- # Read header
- headerLength = int(fp.read(8))
- header = fp.read(headerLength)
+ # Read header
+ headerLength = int(fp.read(8))
+ header = fp.read(headerLength)
- # Read image data
- imgLength = int(fp.read(8))
- fp.read(2) # TODO: seek() isn't working for some reason. Debug.
- img = fp.read(imgLength)
+ # Read image data
+ imgLength = int(fp.read(8))
+ fp.read(2) # TODO: seek() isn't working for some reason. Debug.
+ img = fp.read(imgLength)
- # Write image
- with open(fname, 'wb') as f:
- f.write(img)
+ # Write image
+ with open(fname, 'wb') as f:
+ f.write(img)
- return "OK"
+ return "OK"
diff --git a/cgi/template.py b/cgi/template.py
index 0a7c530..f58e17f 100644
--- a/cgi/template.py
+++ b/cgi/template.py
@@ -2,116 +2,118 @@
import tenjin
import random
import re
-from tenjin.helpers import * # Used when templating
+from tenjin.helpers import * # Used when templating
from settings import Settings
from database import *
+
def renderTemplate(template, template_values={}, mobile=False, noindex=False):
- """
- Run Tenjin on the supplied template name, with the extra values
- template_values (if supplied)
- """
- values = {
- "title": Settings.NAME,
- "board": None,
- "board_name": None,
- "board_long": None,
- "is_page": "false",
- "noindex": None,
- "replythread": 0,
- "home_url": Settings.HOME_URL,
- "boards_url": Settings.BOARDS_URL,
- "images_url": Settings.IMAGES_URL,
- "static_url": Settings.STATIC_URL,
- "cgi_url": Settings.CGI_URL,
- "banner_url": None,
- "banner_width": None,
- "banner_height": None,
- "disable_name": None,
- "disable_subject": None,
- "styles": Settings.STYLES,
- "styles_default": Settings.STYLES_DEFAULT,
- "txt_styles": Settings.TXT_STYLES,
- "txt_styles_default": Settings.TXT_STYLES_DEFAULT,
- "pagenav": "",
- "reports_enable": Settings.REPORTS_ENABLE,
- "force_css": ""
- }
-
- engine = tenjin.Engine(pp=[tenjin.TrimPreprocessor(True)])
- board = Settings._.BOARD
-
- #if board:
- if template in ["board.html", "threadlist.html", "catalog.html", "kako.html", "paint.html"] or template[0:3] == "txt":
- # TODO HACK
- if board['dir'] == 'world' and not mobile and (template == 'txt_board.html' or template == 'txt_thread.html'):
- template = template[:-4] + 'en.html'
- elif board['dir'] == '2d' and template == 'board.html' and not mobile:
- template = template[:-4] + 'jp.html'
- elif board['dir'] == '0' and template == 'board.html' and not mobile:
- template = template[:-4] + '0.html'
+ """
+ Run Tenjin on the supplied template name, with the extra values
+ template_values (if supplied)
+ """
+ values = {
+ "title": Settings.NAME,
+ "board": None,
+ "board_name": None,
+ "board_long": None,
+ "is_page": "false",
+ "noindex": None,
+ "replythread": 0,
+ "home_url": Settings.HOME_URL,
+ "boards_url": Settings.BOARDS_URL,
+ "images_url": Settings.IMAGES_URL,
+ "static_url": Settings.STATIC_URL,
+ "cgi_url": Settings.CGI_URL,
+ "banner_url": None,
+ "banner_width": None,
+ "banner_height": None,
+ "disable_name": None,
+ "disable_subject": None,
+ "styles": Settings.STYLES,
+ "styles_default": Settings.STYLES_DEFAULT,
+ "txt_styles": Settings.TXT_STYLES,
+ "txt_styles_default": Settings.TXT_STYLES_DEFAULT,
+ "pagenav": "",
+ "reports_enable": Settings.REPORTS_ENABLE,
+ "force_css": ""
+ }
+
+ engine = tenjin.Engine(pp=[tenjin.TrimPreprocessor(True)])
+ board = Settings._.BOARD
+
+ # if board:
+ if template in ["board.html", "threadlist.html", "catalog.html", "kako.html", "paint.html"] or template[0:3] == "txt":
+ # TODO HACK
+ if board['dir'] == 'world' and not mobile and (template == 'txt_board.html' or template == 'txt_thread.html'):
+ template = template[:-4] + 'en.html'
+ elif board['dir'] == '2d' and template == 'board.html' and not mobile:
+ template = template[:-4] + 'jp.html'
+ elif board['dir'] == '0' and template == 'board.html' and not mobile:
+ template = template[:-4] + '0.html'
+
+ try:
+ banners = Settings.banners[board['dir']]
+ if banners:
+ banner_width = Settings.banners[board['dir']]
+ banner_height = Settings.banners[board['dir']]
+ except KeyError:
+ banners = Settings.banners['default']
+ banner_width = Settings.banners['default']
+ banner_height = Settings.banners['default']
+
+ values.update({
+ "board": board["dir"],
+ "board_name": board["name"],
+ "board_long": board["longname"],
+ "board_type": board["board_type"],
+ "oek_finish": 0,
+ "disable_name": (board["disable_name"] == '1'),
+ "disable_subject": (board["disable_subject"] == '1'),
+ "default_subject": board["subject"],
+ "postarea_desc": board["postarea_desc"],
+ "postarea_extra": board["postarea_extra"],
+ "allow_images": (board["allow_images"] == '1'),
+ "allow_image_replies": (board["allow_image_replies"] == '1'),
+ "allow_noimage": (board["allow_noimage"] == '1'),
+ "allow_spoilers": (board["allow_spoilers"] == '1'),
+ "allow_oekaki": (board["allow_oekaki"] == '1'),
+ "archive": (board["archive"] == '1'),
+ "force_css": board["force_css"],
+ "noindex": (board["secret"] == '1'),
+ "useid": board["useid"],
+ "maxsize": board["maxsize"],
+ "maxage": board["maxage"],
+ "maxdimensions": board["thumb_px"],
+ "supported_filetypes": board["filetypes_ext"],
+ "prevrange": '',
+ "nextrange": '',
+ })
+ else:
+ banners = Settings.banners['default']
+ banner_width = Settings.banners['default']
+ banner_height = Settings.banners['default']
+
+ if Settings.ENABLE_BANNERS:
+ if len(banners) > 1:
+ random_number = random.randrange(0, len(banners))
+ BANNER_URL = Settings.banners_folder + banners[random_number][0]
+ BANNER_WIDTH = banners[random_number][1]
+ BANNER_HEIGHT = banners[random_number][2]
+ else:
+ BANNER_URL = Settings.banners_folder + banners[0][0]
+ BANNER_WIDTH = banners[0][1]
+ BANNER_HEIGHT = banners[0][2]
+
+ values.update({"banner_url": BANNER_URL,
+ "banner_width": BANNER_WIDTH, "banner_height": BANNER_HEIGHT})
- try:
- banners = Settings.banners[board['dir']]
- if banners:
- banner_width = Settings.banners[board['dir']]
- banner_height = Settings.banners[board['dir']]
- except KeyError:
- banners = Settings.banners['default']
- banner_width = Settings.banners['default']
- banner_height = Settings.banners['default']
+ values.update(template_values)
- values.update({
- "board": board["dir"],
- "board_name": board["name"],
- "board_long": board["longname"],
- "board_type": board["board_type"],
- "oek_finish": 0,
- "disable_name": (board["disable_name"] == '1'),
- "disable_subject": (board["disable_subject"] == '1'),
- "default_subject": board["subject"],
- "postarea_desc": board["postarea_desc"],
- "postarea_extra": board["postarea_extra"],
- "allow_images": (board["allow_images"] == '1'),
- "allow_image_replies": (board["allow_image_replies"] == '1'),
- "allow_noimage": (board["allow_noimage"] == '1'),
- "allow_spoilers": (board["allow_spoilers"] == '1'),
- "allow_oekaki": (board["allow_oekaki"] == '1'),
- "archive": (board["archive"] == '1'),
- "force_css": board["force_css"],
- "noindex": (board["secret"] == '1'),
- "useid": board["useid"],
- "maxsize": board["maxsize"],
- "maxage": board["maxage"],
- "maxdimensions": board["thumb_px"],
- "supported_filetypes": board["filetypes_ext"],
- "prevrange": '',
- "nextrange": '',
- })
- else:
- banners = Settings.banners['default']
- banner_width = Settings.banners['default']
- banner_height = Settings.banners['default']
-
- if Settings.ENABLE_BANNERS:
- if len(banners) > 1:
- random_number = random.randrange(0, len(banners))
- BANNER_URL = Settings.banners_folder + banners[random_number][0]
- BANNER_WIDTH = banners[random_number][1]
- BANNER_HEIGHT = banners[random_number][2]
+ if mobile:
+ template_folder = "templates/mobile/"
else:
- BANNER_URL = Settings.banners_folder + banners[0][0]
- BANNER_WIDTH = banners[0][1]
- BANNER_HEIGHT = banners[0][2]
-
- values.update({"banner_url": BANNER_URL, "banner_width": BANNER_WIDTH, "banner_height": BANNER_HEIGHT})
-
- values.update(template_values)
+ template_folder = "templates/"
- if mobile:
- template_folder = "templates/mobile/"
- else:
- template_folder = "templates/"
-
-    return engine.render(template_folder + template, values)
\ No newline at end of file
+ return engine.render(template_folder + template, values)
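# renderTemplate() above ultimately calls tenjin.Engine(...).render() with
# the values dict it builds.  A minimal, file-free sketch of the same
# templating step, using the bundled cgi/tenjin.py under Python 2 (matching
# the rest of this codebase):
import tenjin
from tenjin.helpers import *            # escape()/to_str() used by ${...}
t = tenjin.Template(input="<h1>${title}</h1>\n")
print(t.render({"title": "Bienvenido a Internet"}))   # -> <h1>Bienvenido a Internet</h1>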
diff --git a/cgi/tenjin.py b/cgi/tenjin.py
index db8cdde..ddc12bb 100644
--- a/cgi/tenjin.py
+++ b/cgi/tenjin.py
@@ -1,26 +1,26 @@
##
-## $Release: 1.1.1 $
-## $Copyright: copyright(c) 2007-2012 kuwata-lab.com all rights reserved. $
-## $License: MIT License $
+# $Release: 1.1.1 $
+# $Copyright: copyright(c) 2007-2012 kuwata-lab.com all rights reserved. $
+# $License: MIT License $
##
-## Permission is hereby granted, free of charge, to any person obtaining
-## a copy of this software and associated documentation files (the
-## "Software"), to deal in the Software without restriction, including
-## without limitation the rights to use, copy, modify, merge, publish,
-## distribute, sublicense, and/or sell copies of the Software, and to
-## permit persons to whom the Software is furnished to do so, subject to
-## the following conditions:
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
##
-## The above copyright notice and this permission notice shall be
-## included in all copies or substantial portions of the Software.
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
##
-## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-## LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-## OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-## WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
##
"""Very fast and light-weight template engine based embedded Python.
@@ -29,15 +29,19 @@
http://www.kuwata-lab.com/tenjin/pytenjin-examples.html
"""
-__version__ = "$Release: 1.1.1 $"[10:-2]
-__license__ = "$License: MIT License $"[10:-2]
-__all__ = ('Template', 'Engine', )
+__version__ = "$Release: 1.1.1 $"[10:-2]
+__license__ = "$License: MIT License $"[10:-2]
+__all__ = ('Template', 'Engine', )
-import sys, os, re, time, marshal
-from time import time as _time
-from os.path import getmtime as _getmtime
from os.path import isfile as _isfile
+from os.path import getmtime as _getmtime
+from time import time as _time
+import sys
+import os
+import re
+import time
+import marshal
random = pickle = unquote = None # lazy import
python3 = sys.version_info[0] == 3
python2 = sys.version_info[0] == 2
@@ -46,12 +50,13 @@ logger = None
##
-## utilities
+# utilities
##
def _write_binary_file(filename, content):
global random
- if random is None: from random import random
+ if random is None:
+ from random import random
tmpfile = filename + str(random())[1:]
f = open(tmpfile, 'w+b') # on windows, 'w+b' is preffered than 'wb'
try:
@@ -62,9 +67,11 @@ def _write_binary_file(filename, content):
try:
os.rename(tmpfile, filename)
except:
- os.remove(filename) # on windows, existing file should be removed before renaming
+ # on windows, existing file should be removed before renaming
+ os.remove(filename)
os.rename(tmpfile, filename)
+
def _read_binary_file(filename):
f = open(filename, 'rb')
try:
@@ -72,25 +79,32 @@ def _read_binary_file(filename):
finally:
f.close()
+
codecs = None # lazy import
+
def _read_text_file(filename, encoding=None):
global codecs
- if not codecs: import codecs
+ if not codecs:
+ import codecs
f = codecs.open(filename, encoding=(encoding or 'utf-8'))
try:
return f.read()
finally:
f.close()
+
def _read_template_file(filename, encoding=None):
- s = _read_binary_file(filename) ## binary(=str)
- if encoding: s = s.decode(encoding) ## binary(=str) to unicode
+ s = _read_binary_file(filename) # binary(=str)
+ if encoding:
+ s = s.decode(encoding) # binary(=str) to unicode
return s
+
_basestring = basestring
-_unicode = unicode
-_bytes = str
+_unicode = unicode
+_bytes = str
+
def _ignore_not_found_error(f, default=None):
try:
@@ -100,6 +114,7 @@ def _ignore_not_found_error(f, default=None):
return default
raise
+
def create_module(module_name, dummy_func=None, **kwargs):
"""ex. mod = create_module('tenjin.util')"""
try:
@@ -116,12 +131,13 @@ def create_module(module_name, dummy_func=None, **kwargs):
exec(dummy_func.func_code, mod.__dict__)
return mod
+
def _raise(exception_class, *args):
raise exception_class(*args)
##
-## helper method's module
+# helper method's module
##
def _dummy():
@@ -142,13 +158,16 @@ def _dummy():
"""
if encode:
if decode:
- raise ValueError("can't specify both encode and decode encoding.")
+ raise ValueError(
+ "can't specify both encode and decode encoding.")
else:
def to_str(val, _str=str, _unicode=unicode, _isa=isinstance, _encode=encode):
"""Convert val into string or return '' if None. Unicode will be encoded into binary(=str)."""
- if _isa(val, _str): return val
- if val is None: return ''
- #if _isa(val, _unicode): return val.encode(_encode) # unicode to binary(=str)
+ if _isa(val, _str):
+ return val
+ if val is None:
+ return ''
+ # if _isa(val, _unicode): return val.encode(_encode) # unicode to binary(=str)
if _isa(val, _unicode):
return val.encode(_encode) # unicode to binary(=str)
return _str(val)
@@ -156,18 +175,23 @@ def _dummy():
if decode:
def to_str(val, _str=str, _unicode=unicode, _isa=isinstance, _decode=decode):
"""Convert val into string or return '' if None. Binary(=str) will be decoded into unicode."""
- #if _isa(val, _str): return val.decode(_decode) # binary(=str) to unicode
+ # if _isa(val, _str): return val.decode(_decode) # binary(=str) to unicode
if _isa(val, _str):
return val.decode(_decode)
- if val is None: return ''
- if _isa(val, _unicode): return val
+ if val is None:
+ return ''
+ if _isa(val, _unicode):
+ return val
return _unicode(val)
else:
def to_str(val, _str=str, _unicode=unicode, _isa=isinstance):
"""Convert val into string or return '' if None. Both binary(=str) and unicode will be retruned as-is."""
- if _isa(val, _str): return val
- if val is None: return ''
- if _isa(val, _unicode): return val
+ if _isa(val, _str):
+ return val
+ if val is None:
+ return ''
+ if _isa(val, _unicode):
+ return val
return _str(val)
return to_str
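# generate_tostrfunc() above builds the to_str() helper used inside
# templates; the default one turns None into '' and leaves unicode alone.
# Import assumes the bundled cgi/tenjin.py, run under Python 2:
from tenjin.helpers import to_str
assert to_str(None) == ''
assert to_str(123) == '123'
assert to_str(u'ok') == u'ok'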
@@ -197,21 +221,21 @@ def _dummy():
class CaptureContext(object):
def __init__(self, name, store_to_context=True, lvars=None):
- self.name = name
+ self.name = name
self.store_to_context = store_to_context
self.lvars = lvars or sys._getframe(1).f_locals
def __enter__(self):
lvars = self.lvars
self._buf_orig = lvars['_buf']
- lvars['_buf'] = _buf = []
+ lvars['_buf'] = _buf = []
lvars['_extend'] = _buf.extend
return self
def __exit__(self, *args):
lvars = self.lvars
_buf = lvars['_buf']
- lvars['_buf'] = self._buf_orig
+ lvars['_buf'] = self._buf_orig
lvars['_extend'] = self._buf_orig.extend
lvars[self.name] = self.captured = ''.join(_buf)
if self.store_to_context and '_context' in lvars:
@@ -236,7 +260,8 @@ def _dummy():
lvars = sys._getframe(_depth).f_locals
capture_context = lvars.pop('_capture_context', None)
if not capture_context:
- raise Exception('stop_capture(): start_capture() is not called before.')
+ raise Exception(
+ 'stop_capture(): start_capture() is not called before.')
capture_context.store_to_context = store_to_context
capture_context.__exit__()
return capture_context.captured
@@ -270,19 +295,25 @@ def _dummy():
global unquote
if unquote is None:
from urllib import unquote
- dct = { 'lt':'<', 'gt':'>', 'amp':'&', 'quot':'"', '#039':"'", }
+ dct = {'lt': '<', 'gt': '>', 'amp': '&', 'quot': '"', '#039': "'", }
+
def unescape(s):
- #return s.replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"').replace('&#039;', "'").replace('&amp;', '&')
- return re.sub(r'&(lt|gt|quot|amp|#039);', lambda m: dct[m.group(1)], s)
+ # return s.replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"').replace('&#039;', "'").replace('&amp;', '&')
+ return re.sub(r'&(lt|gt|quot|amp|#039);', lambda m: dct[m.group(1)], s)
s = to_str(s)
- s = re.sub(r'%3C%60%23(.*?)%23%60%3E', lambda m: '#{%s}' % unquote(m.group(1)), s)
- s = re.sub(r'%3C%60%24(.*?)%24%60%3E', lambda m: '${%s}' % unquote(m.group(1)), s)
- s = re.sub(r'&lt;`#(.*?)#`&gt;', lambda m: '#{%s}' % unescape(m.group(1)), s)
- s = re.sub(r'&lt;`\$(.*?)\$`&gt;', lambda m: '${%s}' % unescape(m.group(1)), s)
+ s = re.sub(r'%3C%60%23(.*?)%23%60%3E',
+ lambda m: '#{%s}' % unquote(m.group(1)), s)
+ s = re.sub(r'%3C%60%24(.*?)%24%60%3E',
+ lambda m: '${%s}' % unquote(m.group(1)), s)
+ s = re.sub(r'&lt;`#(.*?)#`&gt;',
+ lambda m: '#{%s}' % unescape(m.group(1)), s)
+ s = re.sub(r'&lt;`\$(.*?)\$`&gt;',
+ lambda m: '${%s}' % unescape(m.group(1)), s)
s = re.sub(r'<`#(.*?)#`>', r'#{\1}', s)
s = re.sub(r'<`\$(.*?)\$`>', r'${\1}', s)
return s
+
helpers = create_module('tenjin.helpers', _dummy, sys=sys, re=re)
helpers.__all__ = ['to_str', 'escape', 'echo', 'new_cycle', 'generate_tostrfunc',
'start_capture', 'stop_capture', 'capture_as', 'captured_as',
@@ -293,13 +324,14 @@ generate_tostrfunc = helpers.generate_tostrfunc
##
-## escaped module
+# escaped module
##
def _dummy():
global is_escaped, as_escaped, to_escaped
global Escaped, EscapedStr, EscapedUnicode
global __all__
- __all__ = ('is_escaped', 'as_escaped', 'to_escaped', ) #'Escaped', 'EscapedStr',
+ # 'Escaped', 'EscapedStr',
+ __all__ = ('is_escaped', 'as_escaped', 'to_escaped', )
class Escaped(object):
"""marking class that object is already escaped."""
@@ -319,8 +351,10 @@ def _dummy():
def as_escaped(s):
"""mark string as escaped, without escaping."""
- if isinstance(s, str): return EscapedStr(s)
- if isinstance(s, unicode): return EscapedUnicode(s)
+ if isinstance(s, str):
+ return EscapedStr(s)
+ if isinstance(s, unicode):
+ return EscapedUnicode(s)
raise TypeError("as_escaped(%r): expected str or unicode." % (s, ))
def to_escaped(value):
@@ -329,23 +363,24 @@ def _dummy():
if hasattr(value, '__html__'):
value = value.__html__()
if is_escaped(value):
- #return value # EscapedUnicode should be convered into EscapedStr
+            # return value # EscapedUnicode should be converted into EscapedStr
return as_escaped(_helpers.to_str(value))
- #if isinstance(value, _basestring):
+ # if isinstance(value, _basestring):
# return as_escaped(_helpers.escape(value))
return as_escaped(_helpers.escape(_helpers.to_str(value)))
+
escaped = create_module('tenjin.escaped', _dummy, _helpers=helpers)
##
-## module for html
+# module for html
##
def _dummy():
global escape_html, escape_xml, escape, tagattr, tagattrs, _normalize_attrs
global checked, selected, disabled, nl2br, text2html, nv, js_link
- #_escape_table = { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;' }
+ # _escape_table = { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;' }
#_escape_pattern = re.compile(r'[&<>"]')
##_escape_callable = lambda m: _escape_table[m.group(0)]
##_escape_callable = lambda m: _escape_table.__get__(m.group(0))
@@ -353,22 +388,22 @@ def _dummy():
#_escape_callable = lambda m: _escape_get(m.group(0))
#_escape_sub = _escape_pattern.sub
- #def escape_html(s):
+ # def escape_html(s):
# return s # 3.02
- #def escape_html(s):
+ # def escape_html(s):
# return _escape_pattern.sub(_escape_callable, s) # 6.31
- #def escape_html(s):
+ # def escape_html(s):
# return _escape_sub(_escape_callable, s) # 6.01
- #def escape_html(s, _p=_escape_pattern, _f=_escape_callable):
+ # def escape_html(s, _p=_escape_pattern, _f=_escape_callable):
# return _p.sub(_f, s) # 6.27
- #def escape_html(s, _sub=_escape_pattern.sub, _callable=_escape_callable):
+ # def escape_html(s, _sub=_escape_pattern.sub, _callable=_escape_callable):
# return _sub(_callable, s) # 6.04
- #def escape_html(s):
+ # def escape_html(s):
# s = s.replace('&', '&amp;')
# s = s.replace('<', '&lt;')
# s = s.replace('>', '&gt;')
@@ -384,9 +419,12 @@ def _dummy():
def tagattr(name, expr, value=None, escape=True):
"""(experimental) Return ' name="value"' if expr is true value, else '' (empty string).
If value is not specified, expr is used as value instead."""
- if not expr and expr != 0: return _escaped.as_escaped('')
- if value is None: value = expr
- if escape: value = _escaped.to_escaped(value)
+ if not expr and expr != 0:
+ return _escaped.as_escaped('')
+ if value is None:
+ value = expr
+ if escape:
+ value = _escaped.to_escaped(value)
return _escaped.as_escaped(' %s="%s"' % (name, value))
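# tagattr() above renders an attribute only when `expr` is truthy (or 0);
# a quick check against the bundled module (Python 2, cgi/ on sys.path):
import tenjin
assert tenjin.html.tagattr('size', 20) == ' size="20"'
assert tenjin.html.tagattr('size', '') == ''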
def tagattrs(**kwargs):
@@ -399,14 +437,19 @@ def _dummy():
"""
kwargs = _normalize_attrs(kwargs)
esc = _escaped.to_escaped
- s = ''.join([ ' %s="%s"' % (k, esc(v)) for k, v in kwargs.iteritems() if v or v == 0 ])
+ s = ''.join([' %s="%s"' % (k, esc(v))
+ for k, v in kwargs.iteritems() if v or v == 0])
return _escaped.as_escaped(s)
def _normalize_attrs(kwargs):
- if 'klass' in kwargs: kwargs['class'] = kwargs.pop('klass')
- if 'checked' in kwargs: kwargs['checked'] = kwargs.pop('checked') and 'checked' or None
- if 'selected' in kwargs: kwargs['selected'] = kwargs.pop('selected') and 'selected' or None
- if 'disabled' in kwargs: kwargs['disabled'] = kwargs.pop('disabled') and 'disabled' or None
+ if 'klass' in kwargs:
+ kwargs['class'] = kwargs.pop('klass')
+ if 'checked' in kwargs:
+ kwargs['checked'] = kwargs.pop('checked') and 'checked' or None
+ if 'selected' in kwargs:
+ kwargs['selected'] = kwargs.pop('selected') and 'selected' or None
+ if 'disabled' in kwargs:
+ kwargs['disabled'] = kwargs.pop('disabled') and 'disabled' or None
return kwargs
def checked(expr):
@@ -432,8 +475,9 @@ def _dummy():
if not text:
return _escaped.as_escaped('')
s = _escaped.to_escaped(text)
- if use_nbsp: s = s.replace(' ', ' &nbsp;')
- #return nl2br(s)
+ if use_nbsp:
+ s = s.replace(' ', ' &nbsp;')
+ # return nl2br(s)
s = s.replace('\n', '<br />\n')
return _escaped.as_escaped(s)
@@ -449,19 +493,20 @@ def _dummy():
>>> nv('rank', 'A', '.', klass='error', style='color:red')
'name="rank" value="A" id="rank.A" class="error" style="color:red"'
"""
- name = _escaped.to_escaped(name)
+ name = _escaped.to_escaped(name)
value = _escaped.to_escaped(value)
s = sep and 'name="%s" value="%s" id="%s"' % (name, value, name+sep+value) \
- or 'name="%s" value="%s"' % (name, value)
+ or 'name="%s" value="%s"' % (name, value)
html = kwargs and s + tagattrs(**kwargs) or s
return _escaped.as_escaped(html)
def js_link(label, onclick, **kwargs):
s = kwargs and tagattrs(**kwargs) or ''
html = '<a href="javascript:undefined" onclick="%s;return false"%s>%s</a>' % \
- (_escaped.to_escaped(onclick), s, _escaped.to_escaped(label))
+ (_escaped.to_escaped(onclick), s, _escaped.to_escaped(label))
return _escaped.as_escaped(html)
+
html = create_module('tenjin.html', _dummy, helpers=helpers, _escaped=escaped)
helpers.escape = html.escape_html
helpers.html = html # for backward compatibility
@@ -469,10 +514,11 @@ sys.modules['tenjin.helpers.html'] = html
##
-## utility function to set default encoding of template files
+# utility function to set default encoding of template files
##
_template_encoding = (None, 'utf-8') # encodings for decode and encode
+
def set_template_encoding(decode=None, encode=None):
"""Set default encoding of template files.
This should be called before importing helper functions.
@@ -486,9 +532,11 @@ def set_template_encoding(decode=None, encode=None):
if _template_encoding == (decode, encode):
return
if decode and encode:
- raise ValueError("set_template_encoding(): cannot specify both decode and encode.")
+ raise ValueError(
+ "set_template_encoding(): cannot specify both decode and encode.")
if not decode and not encode:
- raise ValueError("set_template_encoding(): decode or encode should be specified.")
+ raise ValueError(
+ "set_template_encoding(): decode or encode should be specified.")
if decode:
Template.encoding = decode # unicode base template
helpers.to_str = helpers.generate_tostrfunc(decode=decode)
@@ -499,7 +547,7 @@ def set_template_encoding(decode=None, encode=None):
##
-## Template class
+# Template class
##
class TemplateSyntaxError(SyntaxError):
@@ -510,8 +558,8 @@ class TemplateSyntaxError(SyntaxError):
return self.args[0]
return ''.join([
"%s:%s:%s: %s\n" % (ex.filename, ex.lineno, ex.offset, ex.msg, ),
- "%4d: %s\n" % (ex.lineno, ex.text.rstrip(), ),
- " %s^\n" % (' ' * ex.offset, ),
+ "%4d: %s\n" % (ex.lineno, ex.text.rstrip(), ),
+ " %s^\n" % (' ' * ex.offset, ),
])
@@ -522,21 +570,21 @@ class Template(object):
http://www.kuwata-lab.com/tenjin/pytenjin-examples.html
"""
- ## default value of attributes
- filename = None
- encoding = None
+ # default value of attributes
+ filename = None
+ encoding = None
escapefunc = 'escape'
- tostrfunc = 'to_str'
- indent = 4
- preamble = None # "_buf = []; _expand = _buf.expand; _to_str = to_str; _escape = escape"
- postamble = None # "print ''.join(_buf)"
- smarttrim = None
- args = None
- timestamp = None
- trace = False # if True then '<!-- begin: file -->' and '<!-- end: file -->' are printed
+ tostrfunc = 'to_str'
+ indent = 4
+ preamble = None # "_buf = []; _expand = _buf.expand; _to_str = to_str; _escape = escape"
+ postamble = None # "print ''.join(_buf)"
+ smarttrim = None
+ args = None
+ timestamp = None
+ trace = False # if True then '<!-- begin: file -->' and '<!-- end: file -->' are printed
def __init__(self, filename=None, encoding=None, input=None, escapefunc=None, tostrfunc=None,
- indent=None, preamble=None, postamble=None, smarttrim=None, trace=None):
+ indent=None, preamble=None, postamble=None, smarttrim=None, trace=None):
"""Initailizer of Template class.
filename:str (=None)
@@ -565,29 +613,40 @@ class Template(object):
If True then "<div>\\n#{_context}\\n</div>" is parsed as
"<div>\\n#{_context}</div>".
"""
- if encoding is not None: self.encoding = encoding
- if escapefunc is not None: self.escapefunc = escapefunc
- if tostrfunc is not None: self.tostrfunc = tostrfunc
- if indent is not None: self.indent = indent
- if preamble is not None: self.preamble = preamble
- if postamble is not None: self.postamble = postamble
- if smarttrim is not None: self.smarttrim = smarttrim
- if trace is not None: self.trace = trace
+ if encoding is not None:
+ self.encoding = encoding
+ if escapefunc is not None:
+ self.escapefunc = escapefunc
+ if tostrfunc is not None:
+ self.tostrfunc = tostrfunc
+ if indent is not None:
+ self.indent = indent
+ if preamble is not None:
+ self.preamble = preamble
+ if postamble is not None:
+ self.postamble = postamble
+ if smarttrim is not None:
+ self.smarttrim = smarttrim
+ if trace is not None:
+ self.trace = trace
#
- if preamble is True: self.preamble = "_buf = []"
- if postamble is True: self.postamble = "print(''.join(_buf))"
+ if preamble is True:
+ self.preamble = "_buf = []"
+ if postamble is True:
+ self.postamble = "print(''.join(_buf))"
if input:
self.convert(input, filename)
- self.timestamp = False # False means 'file not exist' (= Engine should not check timestamp of file)
+ # False means 'file not exist' (= Engine should not check timestamp of file)
+ self.timestamp = False
elif filename:
self.convert_file(filename)
else:
self._reset()
def _reset(self, input=None, filename=None):
- self.script = None
+ self.script = None
self.bytecode = None
- self.input = input
+ self.input = input
self.filename = filename
if input != None:
i = input.find("\n")
@@ -648,7 +707,8 @@ class Template(object):
return pat
def parse_stmts(self, buf, input):
- if not input: return
+ if not input:
+ return
rexp = self.stmt_pattern()
is_bol = True
index = 0
@@ -658,7 +718,7 @@ class Template(object):
#code = input[m.start()+4+len(mspace):m.end()-len(close)-(rspace and len(rspace) or 0)]
text = input[index:m.start()]
index = m.end()
- ## detect spaces at beginning of line
+ # detect spaces at beginning of line
lspace = None
if text == '':
if is_bol:
@@ -675,13 +735,13 @@ class Template(object):
if s.isspace():
lspace, text = s, text[:rindex+1]
#is_bol = rspace is not None
- ## add text, spaces, and statement
+ # add text, spaces, and statement
self.parse_exprs(buf, text, is_bol)
is_bol = rspace is not None
- #if mspace == "\n":
+ # if mspace == "\n":
if mspace and mspace.endswith("\n"):
code = "\n" + (code or "")
- #if rspace == "\n":
+ # if rspace == "\n":
if rspace and rspace.endswith("\n"):
code = (code or "") + "\n"
if code:
@@ -708,10 +768,12 @@ class Template(object):
def _add_args_declaration(self, buf, m):
arr = (m.group(1) or '').split(',')
- args = []; declares = []
+ args = []
+ declares = []
for s in arr:
arg = s.strip()
- if not s: continue
+ if not s:
+ continue
if not re.match('^[a-zA-Z_]\w*$', arg):
raise ValueError("%r: invalid template argument." % arg)
args.append(arg)
@@ -722,7 +784,8 @@ class Template(object):
buf.append(''.join(declares) + "\n")
s = '(?:\{.*?\}.*?)*'
- EXPR_PATTERN = (r'#\{(.*?'+s+r')\}|\$\{(.*?'+s+r')\}|\{=(?:=(.*?)=|(.*?))=\}', re.S)
+ EXPR_PATTERN = (
+ r'#\{(.*?'+s+r')\}|\$\{(.*?'+s+r')\}|\{=(?:=(.*?)=|(.*?))=\}', re.S)
del s
def expr_pattern(self):
@@ -733,10 +796,14 @@ class Template(object):
def get_expr_and_flags(self, match):
expr1, expr2, expr3, expr4 = match.groups()
- if expr1 is not None: return expr1, (False, True) # not escape, call to_str
- if expr2 is not None: return expr2, (True, True) # call escape, call to_str
- if expr3 is not None: return expr3, (False, True) # not escape, call to_str
- if expr4 is not None: return expr4, (True, True) # call escape, call to_str
+ if expr1 is not None:
+ return expr1, (False, True) # not escape, call to_str
+ if expr2 is not None:
+ return expr2, (True, True) # call escape, call to_str
+ if expr3 is not None:
+ return expr3, (False, True) # not escape, call to_str
+ if expr4 is not None:
+ return expr4, (True, True) # call escape, call to_str
def parse_exprs(self, buf, input, is_bol=False):
buf2 = []
@@ -745,17 +812,18 @@ class Template(object):
buf.append(''.join(buf2))
def _parse_exprs(self, buf, input, is_bol=False):
- if not input: return
+ if not input:
+ return
self.start_text_part(buf)
rexp = self.expr_pattern()
smarttrim = self.smarttrim
nl = self.newline
- nl_len = len(nl)
+ nl_len = len(nl)
pos = 0
for m in rexp.finditer(input):
start = m.start()
- text = input[pos:start]
- pos = m.end()
+ text = input[pos:start]
+ pos = m.end()
expr, flags = self.get_expr_and_flags(m)
#
if text:
@@ -763,7 +831,8 @@ class Template(object):
self.add_expr(buf, expr, *flags)
#
if smarttrim:
- flag_bol = text.endswith(nl) or not text and (start > 0 or is_bol)
+ flag_bol = text.endswith(
+ nl) or not text and (start > 0 or is_bol)
if flag_bol and not flags[0] and input[pos:pos+nl_len] == nl:
pos += nl_len
buf.append("\n")
@@ -779,7 +848,7 @@ class Template(object):
def start_text_part(self, buf):
self._add_localvars_assignments_to_text(buf)
- #buf.append("_buf.extend((")
+ # buf.append("_buf.extend((")
buf.append("_extend((")
def _add_localvars_assignments_to_text(self, buf):
@@ -796,30 +865,43 @@ class Template(object):
return text
def add_text(self, buf, text, encode_newline=False):
- if not text: return
+ if not text:
+ return
use_unicode = self.encoding and python2
buf.append(use_unicode and "u'''" or "'''")
text = self._quote_text(text)
- if not encode_newline: buf.extend((text, "''', "))
- elif text.endswith("\r\n"): buf.extend((text[0:-2], "\\r\\n''', "))
- elif text.endswith("\n"): buf.extend((text[0:-1], "\\n''', "))
- else: buf.extend((text, "''', "))
+ if not encode_newline:
+ buf.extend((text, "''', "))
+ elif text.endswith("\r\n"):
+ buf.extend((text[0:-2], "\\r\\n''', "))
+ elif text.endswith("\n"):
+ buf.extend((text[0:-1], "\\n''', "))
+ else:
+ buf.extend((text, "''', "))
_add_text = add_text
def add_expr(self, buf, code, *flags):
- if not code or code.isspace(): return
+ if not code or code.isspace():
+ return
flag_escape, flag_tostr = flags
- if not self.tostrfunc: flag_tostr = False
- if not self.escapefunc: flag_escape = False
- if flag_tostr and flag_escape: s1, s2 = "_escape(_to_str(", ")), "
- elif flag_tostr: s1, s2 = "_to_str(", "), "
- elif flag_escape: s1, s2 = "_escape(", "), "
- else: s1, s2 = "(", "), "
+ if not self.tostrfunc:
+ flag_tostr = False
+ if not self.escapefunc:
+ flag_escape = False
+ if flag_tostr and flag_escape:
+ s1, s2 = "_escape(_to_str(", ")), "
+ elif flag_tostr:
+ s1, s2 = "_to_str(", "), "
+ elif flag_escape:
+ s1, s2 = "_escape(", "), "
+ else:
+ s1, s2 = "(", "), "
buf.extend((s1, code, s2, ))
def add_stmt(self, buf, code):
- if not code: return
+ if not code:
+ return
lines = code.splitlines(True) # keep "\n"
if lines[-1][-1] != "\n":
lines[-1] = lines[-1] + "\n"
@@ -840,59 +922,64 @@ class Template(object):
else:
buf[index] = self._localvars_assignments() + buf[index]
-
- _START_WORDS = dict.fromkeys(('for', 'if', 'while', 'def', 'try:', 'with', 'class'), True)
- _END_WORDS = dict.fromkeys(('#end', '#endfor', '#endif', '#endwhile', '#enddef', '#endtry', '#endwith', '#endclass'), True)
- _CONT_WORDS = dict.fromkeys(('elif', 'else:', 'except', 'except:', 'finally:'), True)
- _WORD_REXP = re.compile(r'\S+')
+ _START_WORDS = dict.fromkeys(
+ ('for', 'if', 'while', 'def', 'try:', 'with', 'class'), True)
+ _END_WORDS = dict.fromkeys(('#end', '#endfor', '#endif', '#endwhile',
+ '#enddef', '#endtry', '#endwith', '#endclass'), True)
+ _CONT_WORDS = dict.fromkeys(
+ ('elif', 'else:', 'except', 'except:', 'finally:'), True)
+ _WORD_REXP = re.compile(r'\S+')
depth = -1
##
- ## ex.
- ## input = r"""
- ## if items:
+ # ex.
+ # input = r"""
+ # if items:
## _buf.extend(('<ul>\n', ))
## i = 0
- ## for item in items:
+ # for item in items:
## i += 1
## _buf.extend(('<li>', to_str(item), '</li>\n', ))
- ## #endfor
+ # endfor
## _buf.extend(('</ul>\n', ))
- ## #endif
- ## """[1:]
+ # endif
+ # """[1:]
## lines = input.splitlines(True)
## block = self.parse_lines(lines)
- ## #=> [ "if items:\n",
- ## [ "_buf.extend(('<ul>\n', ))\n",
+ # => [ "if items:\n",
+ # [ "_buf.extend(('<ul>\n', ))\n",
## "i = 0\n",
## "for item in items:\n",
- ## [ "i += 1\n",
+ # [ "i += 1\n",
## "_buf.extend(('<li>', to_str(item), '</li>\n', ))\n",
- ## ],
- ## "#endfor\n",
+ # ],
+ # "#endfor\n",
## "_buf.extend(('</ul>\n', ))\n",
- ## ],
- ## "#endif\n",
- ## ]
+ # ],
+ # "#endif\n",
+ # ]
def parse_lines(self, lines):
block = []
try:
self._parse_lines(lines.__iter__(), False, block, 0)
except StopIteration:
if self.depth > 0:
- fname, linenum, colnum, linetext = self.filename, len(lines), None, None
- raise TemplateSyntaxError("unexpected EOF.", (fname, linenum, colnum, linetext))
+ fname, linenum, colnum, linetext = self.filename, len(
+ lines), None, None
+ raise TemplateSyntaxError(
+ "unexpected EOF.", (fname, linenum, colnum, linetext))
else:
pass
return block
def _parse_lines(self, lines_iter, end_block, block, linenum):
- if block is None: block = []
+ if block is None:
+ block = []
_START_WORDS = self._START_WORDS
- _END_WORDS = self._END_WORDS
- _CONT_WORDS = self._CONT_WORDS
- _WORD_REXP = self._WORD_REXP
+ _END_WORDS = self._END_WORDS
+ _CONT_WORDS = self._CONT_WORDS
+ _WORD_REXP = self._WORD_REXP
get_line = lines_iter.next
while True:
line = get_line()
@@ -905,11 +992,13 @@ class Template(object):
if word in _END_WORDS:
if word != end_block and word != '#end':
if end_block is False:
- msg = "'%s' found but corresponding statement is missing." % (word, )
+ msg = "'%s' found but corresponding statement is missing." % (
+ word, )
else:
msg = "'%s' expected but got '%s'." % (end_block, word)
colnum = m.start() + 1
- raise TemplateSyntaxError(msg, (self.filename, linenum, colnum, line))
+ raise TemplateSyntaxError(
+ msg, (self.filename, linenum, colnum, line))
return block, line, None, linenum
elif line.endswith(':\n') or line.endswith(':\r\n'):
if word in _CONT_WORDS:
@@ -920,16 +1009,19 @@ class Template(object):
cont_word = None
try:
child_block, line, cont_word, linenum = \
- self._parse_lines(lines_iter, '#end'+word, [], linenum)
+ self._parse_lines(
+ lines_iter, '#end'+word, [], linenum)
block.extend((child_block, line, ))
while cont_word: # 'elif' or 'else:'
child_block, line, cont_word, linenum = \
- self._parse_lines(lines_iter, '#end'+word, [], linenum)
+ self._parse_lines(
+ lines_iter, '#end'+word, [], linenum)
block.extend((child_block, line, ))
except StopIteration:
msg = "'%s' is not closed." % (cont_word or word)
colnum = m.start() + 1
- raise TemplateSyntaxError(msg, (self.filename, linenum, colnum, line))
+ raise TemplateSyntaxError(
+ msg, (self.filename, linenum, colnum, line))
self.depth -= 1
else:
block.append(line)
@@ -953,7 +1045,6 @@ class Template(object):
buf[:] = []
self._join_block(block, buf, 0)
-
def render(self, context=None, globals=None, _buf=None):
"""Evaluate python code with context dictionary.
If _buf is None then return the result of evaluation as str,
@@ -1003,11 +1094,12 @@ class Template(object):
def compile(self):
"""compile self.script into self.bytecode"""
- self.bytecode = compile(self.script, self.filename or '(tenjin)', 'exec')
+ self.bytecode = compile(
+ self.script, self.filename or '(tenjin)', 'exec')
##
-## preprocessor class
+# preprocessor class
##
class Preprocessor(Template):
@@ -1015,7 +1107,8 @@ class Preprocessor(Template):
STMT_PATTERN = (r'<\?PY( |\t|\r?\n)(.*?) ?\?>([ \t]*\r?\n)?', re.S)
- EXPR_PATTERN = (r'#\{\{(.*?)\}\}|\$\{\{(.*?)\}\}|\{#=(?:=(.*?)=|(.*?))=#\}', re.S)
+ EXPR_PATTERN = (
+ r'#\{\{(.*?)\}\}|\$\{\{(.*?)\}\}|\{#=(?:=(.*?)=|(.*?))=#\}', re.S)
def add_expr(self, buf, code, *flags):
if not code or code.isspace():
@@ -1028,13 +1121,14 @@ class TemplatePreprocessor(object):
factory = Preprocessor
def __init__(self, factory=None):
- if factory is not None: self.factory = factory
+ if factory is not None:
+ self.factory = factory
self.globals = sys._getframe(1).f_globals
def __call__(self, input, **kwargs):
filename = kwargs.get('filename')
- context = kwargs.get('context') or {}
- globals = kwargs.get('globals') or self.globals
+ context = kwargs.get('context') or {}
+ globals = kwargs.get('globals') or self.globals
template = self.factory()
template.convert(input, filename)
return template.render(context, globals=globals)
@@ -1042,7 +1136,7 @@ class TemplatePreprocessor(object):
class TrimPreprocessor(object):
- _rexp = re.compile(r'^[ \t]+<', re.M)
+ _rexp = re.compile(r'^[ \t]+<', re.M)
_rexp_all = re.compile(r'^[ \t]+', re.M)
def __init__(self, all=False):
@@ -1062,22 +1156,25 @@ class PrefixedLinePreprocessor(object):
self.regexp = re.compile(r'^([ \t]*)' + prefix + r'(.*)', re.M)
def convert_prefixed_lines(self, text):
- fn = lambda m: "%s<?py%s ?>" % (m.group(1), m.group(2))
+ def fn(m): return "%s<?py%s ?>" % (m.group(1), m.group(2))
return self.regexp.sub(fn, text)
STMT_REXP = re.compile(r'<\?py\s.*?\?>', re.S)
def __call__(self, input, **kwargs):
- buf = []; append = buf.append
+ buf = []
+ append = buf.append
pos = 0
for m in self.STMT_REXP.finditer(input):
text = input[pos:m.start()]
stmt = m.group(0)
pos = m.end()
- if text: append(self.convert_prefixed_lines(text))
+ if text:
+ append(self.convert_prefixed_lines(text))
append(stmt)
rest = input[pos:]
- if rest: append(self.convert_prefixed_lines(rest))
+ if rest:
+ append(self.convert_prefixed_lines(rest))
return "".join(buf)
@@ -1098,7 +1195,8 @@ class JavaScriptPreprocessor(object):
self._parse_chunks(input, buf, filename)
return ''.join(buf)
- CHUNK_REXP = re.compile(r'(?:^( *)<|<)!-- *#(?:JS: (\$?\w+(?:\.\w+)*\(.*?\))|/JS:?) *-->([ \t]*\r?\n)?', re.M)
+ CHUNK_REXP = re.compile(
+ r'(?:^( *)<|<)!-- *#(?:JS: (\$?\w+(?:\.\w+)*\(.*?\))|/JS:?) *-->([ \t]*\r?\n)?', re.M)
def _scan_chunks(self, input, filename):
rexp = self.CHUNK_REXP
@@ -1110,23 +1208,24 @@ class JavaScriptPreprocessor(object):
pos = m.end()
if funcdecl:
if curr_funcdecl:
- raise ParseError("%s is nested in %s. (file: %s, line: %s)" % \
- (funcdecl, curr_funcdecl, filename, _linenum(input, m.start()), ))
+ raise ParseError("%s is nested in %s. (file: %s, line: %s)" %
+ (funcdecl, curr_funcdecl, filename, _linenum(input, m.start()), ))
curr_funcdecl = funcdecl
else:
if not curr_funcdecl:
- raise ParseError("unexpected '<!-- #/JS -->'. (file: %s, line: %s)" % \
- (filename, _linenum(input, m.start()), ))
+ raise ParseError("unexpected '<!-- #/JS -->'. (file: %s, line: %s)" %
+ (filename, _linenum(input, m.start()), ))
curr_funcdecl = None
yield text, lspace, funcdecl, rspace, False
if curr_funcdecl:
- raise ParseError("%s is not closed by '<!-- #/JS -->'. (file: %s, line: %s)" % \
- (curr_funcdecl, filename, _linenum(input, m.start()), ))
+ raise ParseError("%s is not closed by '<!-- #/JS -->'. (file: %s, line: %s)" %
+ (curr_funcdecl, filename, _linenum(input, m.start()), ))
rest = input[pos:]
yield rest, None, None, None, True
def _parse_chunks(self, input, buf, filename=None):
- if not input: return
+ if not input:
+ return
stag = '<script'
if self._attrs:
for k in self._attrs:
@@ -1134,21 +1233,26 @@ class JavaScriptPreprocessor(object):
stag += '>'
etag = '</script>'
for text, lspace, funcdecl, rspace, end_p in self._scan_chunks(input, filename):
- if end_p: break
+ if end_p:
+ break
if funcdecl:
buf.append(text)
if re.match(r'^\$?\w+\(', funcdecl):
- buf.extend((lspace or '', stag, 'function ', funcdecl, "{var _buf='';", rspace or ''))
+ buf.extend((lspace or '', stag, 'function ',
+ funcdecl, "{var _buf='';", rspace or ''))
else:
m = re.match(r'(.+?)\((.*)\)', funcdecl)
- buf.extend((lspace or '', stag, m.group(1), '=function(', m.group(2), "){var _buf='';", rspace or ''))
+ buf.extend((lspace or '', stag, m.group(
+ 1), '=function(', m.group(2), "){var _buf='';", rspace or ''))
else:
self._parse_stmts(text, buf)
- buf.extend((lspace or '', "return _buf;};", etag, rspace or ''))
+ buf.extend(
+ (lspace or '', "return _buf;};", etag, rspace or ''))
#
buf.append(text)
- STMT_REXP = re.compile(r'(?:^( *)<|<)\?js(\s.*?) ?\?>([ \t]*\r?\n)?', re.M | re.S)
+ STMT_REXP = re.compile(
+ r'(?:^( *)<|<)\?js(\s.*?) ?\?>([ \t]*\r?\n)?', re.M | re.S)
def _scan_stmts(self, input):
rexp = self.STMT_REXP
@@ -1162,9 +1266,11 @@ class JavaScriptPreprocessor(object):
yield rest, None, None, None, True
def _parse_stmts(self, input, buf):
- if not input: return
+ if not input:
+ return
for text, lspace, code, rspace, end_p in self._scan_stmts(input):
- if end_p: break
+ if end_p:
+ break
if lspace is not None and rspace is not None:
self._parse_exprs(text, buf)
buf.extend((lspace, code, rspace))
@@ -1207,7 +1313,8 @@ class JavaScriptPreprocessor(object):
yield rest, None, None, True
def _parse_exprs(self, input, buf):
- if not input: return
+ if not input:
+ return
buf.append("_buf+=")
extend = buf.extend
op = ''
@@ -1231,7 +1338,7 @@ class JavaScriptPreprocessor(object):
def _escape_text(self, text):
lines = text.splitlines(True)
fn = self._escape_str
- s = "\\\n".join( fn(line) for line in lines )
+ s = "\\\n".join(fn(line) for line in lines)
return "".join(("'", s, "'"))
def _escape_str(self, string):
@@ -1251,9 +1358,8 @@ function _EF(c){return _ET[c];};
JS_FUNC = escaped.EscapedStr(JS_FUNC)
-
##
-## cache storages
+# cache storages
##
class CacheStorage(object):
@@ -1281,8 +1387,8 @@ class CacheStorage(object):
return self._store(cachepath, dct)
def _save_data_of(self, template):
- return { 'args' : template.args, 'bytecode' : template.bytecode,
- 'script': template.script, 'timestamp': template.timestamp }
+ return {'args': template.args, 'bytecode': template.bytecode,
+ 'script': template.script, 'timestamp': template.timestamp}
def unset(self, cachepath):
"""remove template object from dict and cache file."""
@@ -1298,15 +1404,18 @@ class CacheStorage(object):
def _load(self, cachepath):
"""(abstract) load dict object which represents template object attributes from cache file."""
- raise NotImplementedError.new("%s#_load(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError.new(
+ "%s#_load(): not implemented yet." % self.__class__.__name__)
def _store(self, cachepath, template):
"""(abstract) load dict object which represents template object attributes from cache file."""
- raise NotImplementedError.new("%s#_store(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError.new(
+ "%s#_store(): not implemented yet." % self.__class__.__name__)
def _delete(self, cachepath):
"""(abstract) remove template object from cache file."""
- raise NotImplementedError.new("%s#_delete(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError.new(
+ "%s#_delete(): not implemented yet." % self.__class__.__name__)
class MemoryCacheStorage(CacheStorage):
@@ -1324,21 +1433,28 @@ class MemoryCacheStorage(CacheStorage):
class FileCacheStorage(CacheStorage):
def _load(self, cachepath):
- if not _isfile(cachepath): return None
- if logger: logger.info("[tenjin.%s] load cache (file=%r)" % (self.__class__.__name__, cachepath))
+ if not _isfile(cachepath):
+ return None
+ if logger:
+ logger.info("[tenjin.%s] load cache (file=%r)" %
+ (self.__class__.__name__, cachepath))
data = _read_binary_file(cachepath)
return self._restore(data)
def _store(self, cachepath, dct):
- if logger: logger.info("[tenjin.%s] store cache (file=%r)" % (self.__class__.__name__, cachepath))
+ if logger:
+ logger.info("[tenjin.%s] store cache (file=%r)" %
+ (self.__class__.__name__, cachepath))
data = self._dump(dct)
_write_binary_file(cachepath, data)
def _restore(self, data):
- raise NotImplementedError("%s._restore(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError(
+ "%s._restore(): not implemented yet." % self.__class__.__name__)
def _dump(self, dct):
- raise NotImplementedError("%s._dump(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError(
+ "%s._dump(): not implemented yet." % self.__class__.__name__)
def _delete(self, cachepath):
_ignore_not_found_error(lambda: os.unlink(cachepath))
@@ -1376,16 +1492,20 @@ class TextCacheStorage(FileCacheStorage):
timestamp = encoding = args = None
for line in header.split("\n"):
key, val = line.split(": ", 1)
- if key == 'timestamp': timestamp = float(val)
- elif key == 'encoding': encoding = val
- elif key == 'args': args = val.split(', ')
- if encoding: script = script.decode(encoding) ## binary(=str) to unicode
+ if key == 'timestamp':
+ timestamp = float(val)
+ elif key == 'encoding':
+ encoding = val
+ elif key == 'args':
+ args = val.split(', ')
+ if encoding:
+ script = script.decode(encoding) # binary(=str) to unicode
return {'args': args, 'script': script, 'timestamp': timestamp}
def _dump(self, dct):
s = dct['script']
if dct.get('encoding') and isinstance(s, unicode):
- s = s.encode(dct['encoding']) ## unicode to binary(=str)
+ s = s.encode(dct['encoding']) # unicode to binary(=str)
sb = []
sb.append("timestamp: %s\n" % dct['timestamp'])
if dct.get('encoding'):
@@ -1397,7 +1517,8 @@ class TextCacheStorage(FileCacheStorage):
s = ''.join(sb)
if python3:
if isinstance(s, str):
- s = s.encode(dct.get('encoding') or 'utf-8') ## unicode(=str) to binary
+ # unicode(=str) to binary
+ s = s.encode(dct.get('encoding') or 'utf-8')
return s
def _save_data_of(self, template):
@@ -1406,27 +1527,30 @@ class TextCacheStorage(FileCacheStorage):
return dct
-
##
-## abstract class for data cache
+# abstract class for data cache
##
class KeyValueStore(object):
def get(self, key, *options):
- raise NotImplementedError("%s.get(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError(
+ "%s.get(): not implemented yet." % self.__class__.__name__)
def set(self, key, value, *options):
- raise NotImplementedError("%s.set(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError(
+ "%s.set(): not implemented yet." % self.__class__.__name__)
def delete(self, key, *options):
- raise NotImplementedError("%s.del(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError(
+ "%s.del(): not implemented yet." % self.__class__.__name__)
def has(self, key, *options):
- raise NotImplementedError("%s.has(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError(
+ "%s.has(): not implemented yet." % self.__class__.__name__)
##
-## memory base data cache
+# memory base data cache
##
class MemoryBaseStore(KeyValueStore):
@@ -1471,7 +1595,7 @@ class MemoryBaseStore(KeyValueStore):
##
-## file base data cache
+# file base data cache
##
class FileBaseStore(KeyValueStore):
@@ -1492,7 +1616,7 @@ class FileBaseStore(KeyValueStore):
def get(self, key, original_timestamp=None):
fpath = self.filepath(key)
- #if not _isfile(fpath): return None
+ # if not _isfile(fpath): return None
stat = _ignore_not_found_error(lambda: os.stat(fpath), None)
if stat is None:
return None
@@ -1505,9 +1629,9 @@ class FileBaseStore(KeyValueStore):
self.delete(key)
return None
if self.encoding:
- f = lambda: _read_text_file(fpath, self.encoding)
+ def f(): return _read_text_file(fpath, self.encoding)
else:
- f = lambda: _read_binary_file(fpath)
+ def f(): return _read_binary_file(fpath)
return _ignore_not_found_error(f, None)
def set(self, key, value, lifetime=0):
@@ -1538,20 +1662,21 @@ class FileBaseStore(KeyValueStore):
return True
-
##
-## html fragment cache helper class
+# html fragment cache helper class
##
class FragmentCacheHelper(object):
"""html fragment cache helper class."""
lifetime = 60 # 1 minute
- prefix = None
+ prefix = None
def __init__(self, store, lifetime=None, prefix=None):
self.store = store
- if lifetime is not None: self.lifetime = lifetime
- if prefix is not None: self.prefix = prefix
+ if lifetime is not None:
+ self.lifetime = lifetime
+ if prefix is not None:
+ self.prefix = prefix
def not_cached(self, cache_key, lifetime=None):
"""(obsolete. use cache_as() instead of this.)
@@ -1560,14 +1685,19 @@ class FragmentCacheHelper(object):
context['_cache_key'] = cache_key
key = self.prefix and self.prefix + cache_key or cache_key
value = self.store.get(key)
- if value: ## cached
- if logger: logger.debug('[tenjin.not_cached] %r: cached.' % (cache_key, ))
+ if value: # cached
+ if logger:
+ logger.debug('[tenjin.not_cached] %r: cached.' % (cache_key, ))
context[key] = value
return False
- else: ## not cached
- if logger: logger.debug('[tenjin.not_cached]: %r: not cached.' % (cache_key, ))
- if key in context: del context[key]
- if lifetime is None: lifetime = self.lifetime
+ else: # not cached
+ if logger:
+ logger.debug(
+ '[tenjin.not_cached]: %r: not cached.' % (cache_key, ))
+ if key in context:
+ del context[key]
+ if lifetime is None:
+ lifetime = self.lifetime
context['_cache_lifetime'] = lifetime
helpers.start_capture(cache_key, _depth=2)
return True
@@ -1579,9 +1709,9 @@ class FragmentCacheHelper(object):
context = f_locals['_context']
cache_key = context.pop('_cache_key')
key = self.prefix and self.prefix + cache_key or cache_key
- if key in context: ## cached
+ if key in context: # cached
value = context.pop(key)
- else: ## not cached
+ else: # not cached
value = helpers.stop_capture(False, _depth=2)
lifetime = context.pop('_cache_lifetime')
self.store.set(key, value, lifetime)
@@ -1596,31 +1726,36 @@ class FragmentCacheHelper(object):
_buf = sys._getframe(1).f_locals['_buf']
value = self.store.get(key)
if value:
- if logger: logger.debug('[tenjin.cache_as] %r: cache found.' % (cache_key, ))
+ if logger:
+ logger.debug('[tenjin.cache_as] %r: cache found.' %
+ (cache_key, ))
_buf.append(value)
else:
- if logger: logger.debug('[tenjin.cache_as] %r: expired or not cached yet.' % (cache_key, ))
+ if logger:
+ logger.debug(
+ '[tenjin.cache_as] %r: expired or not cached yet.' % (cache_key, ))
_buf_len = len(_buf)
yield None
value = ''.join(_buf[_buf_len:])
self.store.set(key, value, lifetime)
-## you can change default store by 'tenjin.helpers.fragment_cache.store = ...'
+
+# you can change default store by 'tenjin.helpers.fragment_cache.store = ...'
helpers.fragment_cache = FragmentCacheHelper(MemoryBaseStore())
-helpers.not_cached = helpers.fragment_cache.not_cached
+helpers.not_cached = helpers.fragment_cache.not_cached
helpers.echo_cached = helpers.fragment_cache.echo_cached
-helpers.cache_as = helpers.fragment_cache.cache_as
+helpers.cache_as = helpers.fragment_cache.cache_as
helpers.__all__.extend(('not_cached', 'echo_cached', 'cache_as'))
-
##
-## helper class to find and read template
+# helper class to find and read template
##
class Loader(object):
def exists(self, filepath):
- raise NotImplementedError("%s.exists(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError(
+ "%s.exists(): not implemented yet." % self.__class__.__name__)
def find(self, filename, dirs=None):
#: if dirs provided then search template file from it.
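
The fragment-cache helpers registered a few hunks above (not_cached, echo_cached, cache_as) are meant to be called from inside a template, where _buf is in scope; the default MemoryBaseStore can be swapped through tenjin.helpers.fragment_cache.store, as the comment there notes. A rough sketch of the usual cache_as() loop in a pyTenjin template (the cache key and 60-second lifetime are illustrative):

    <?py for _ in cache_as('sidebar', 60): ?>
      <div>expensive fragment, re-rendered at most once a minute</div>
    <?py #endfor ?>

Since cache_as is exported via helpers.__all__, a module that does "from tenjin.helpers import *" before rendering makes it available to its templates.
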
@@ -1637,18 +1772,20 @@ class Loader(object):
return None
def abspath(self, filename):
- raise NotImplementedError("%s.abspath(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError(
+ "%s.abspath(): not implemented yet." % self.__class__.__name__)
def timestamp(self, filepath):
- raise NotImplementedError("%s.timestamp(): not implemented yet." % self.__class__.__name__)
+ raise NotImplementedError(
+ "%s.timestamp(): not implemented yet." % self.__class__.__name__)
def load(self, filepath):
- raise NotImplementedError("%s.timestamp(): not implemented yet." % self.__class__.__name__)
-
+ raise NotImplementedError(
+ "%s.timestamp(): not implemented yet." % self.__class__.__name__)
##
-## helper class to find and read files
+# helper class to find and read files
##
class FileSystemLoader(Loader):
@@ -1676,7 +1813,8 @@ class FileSystemLoader(Loader):
mtime2 = _getmtime(filepath)
if mtime != mtime2:
if logger:
- logger.warn("[tenjin] %s.load(): timestamp is changed while reading file." % self.__class__.__name__)
+ logger.warn(
+ "[tenjin] %s.load(): timestamp is changed while reading file." % self.__class__.__name__)
return input, mtime
#: if the file does not exist, return None
return _ignore_not_found_error(f)
@@ -1689,9 +1827,8 @@ class TemplateNotFoundError(Exception):
pass
-
##
-## template engine class
+# template engine class
##
class Engine(object):
@@ -1701,15 +1838,15 @@ class Engine(object):
http://www.kuwata-lab.com/tenjin/pytenjin-examples.html
"""
- ## default value of attributes
- prefix = ''
- postfix = ''
- layout = None
+ # default value of attributes
+ prefix = ''
+ postfix = ''
+ layout = None
templateclass = Template
- path = None
- cache = TextCacheStorage() # save converted Python code into text file
- lang = None
- loader = FileSystemLoader()
+ path = None
+ cache = TextCacheStorage() # save converted Python code into text file
+ lang = None
+ loader = FileSystemLoader()
preprocess = False
preprocessorclass = Preprocessor
timestamp_interval = 1 # seconds
@@ -1743,18 +1880,30 @@ class Engine(object):
Options for Template class constructor.
See document of Template.__init__() for details.
"""
- if prefix: self.prefix = prefix
- if postfix: self.postfix = postfix
- if layout: self.layout = layout
- if templateclass: self.templateclass = templateclass
- if preprocessorclass: self.preprocessorclass = preprocessorclass
- if path is not None: self.path = path
- if lang is not None: self.lang = lang
- if loader is not None: self.loader = loader
- if preprocess is not None: self.preprocess = preprocess
- if pp is None: pp = []
- elif isinstance(pp, list): pass
- elif isinstance(pp, tuple): pp = list(pp)
+ if prefix:
+ self.prefix = prefix
+ if postfix:
+ self.postfix = postfix
+ if layout:
+ self.layout = layout
+ if templateclass:
+ self.templateclass = templateclass
+ if preprocessorclass:
+ self.preprocessorclass = preprocessorclass
+ if path is not None:
+ self.path = path
+ if lang is not None:
+ self.lang = lang
+ if loader is not None:
+ self.loader = loader
+ if preprocess is not None:
+ self.preprocess = preprocess
+ if pp is None:
+ pp = []
+ elif isinstance(pp, list):
+ pass
+ elif isinstance(pp, tuple):
+ pp = list(pp)
else:
raise TypeError("'pp' expected to be a list but got %r." % (pp,))
self.pp = pp
@@ -1798,7 +1947,7 @@ class Engine(object):
'list'
"""
#: if template_name starts with ':', add prefix and postfix to it.
- if template_name[0] == ':' :
+ if template_name[0] == ':':
return self.prefix + template_name[1:] + self.postfix
#: if template_name doesn't start with ':', just return it.
return template_name
@@ -1816,12 +1965,13 @@ class Engine(object):
#if _globals is None: _globals = sys._getframe(3).f_globals
#: preprocess template and return result
#preprocessor = self.preprocessorclass(filepath, input=input)
- #return preprocessor.render(_context, globals=_globals)
+ # return preprocessor.render(_context, globals=_globals)
#: preprocesses input with _context and returns result.
if '_engine' not in _context:
self.hook_context(_context)
for pp in self.pp:
- input = pp.__call__(input, filename=filepath, context=_context, globals=_globals)
+ input = pp.__call__(input, filename=filepath,
+ context=_context, globals=_globals)
return input
def add_template(self, template):
@@ -1837,7 +1987,7 @@ class Engine(object):
now = _time()
last_checked = getattr(template, '_last_checked_at', None)
if last_checked and now < last_checked + self.timestamp_interval:
- #if logger: logger.trace('[tenjin.%s] timestamp check skipped (%f < %f + %f)' % \
+ # if logger: logger.trace('[tenjin.%s] timestamp check skipped (%f < %f + %f)' % \
# (self.__class__.__name__, now, template._last_checked_at, self.timestamp_interval))
return template
#: if timestamp of template object is same as file, return it.
@@ -1845,9 +1995,10 @@ class Engine(object):
template._last_checked_at = now
return template
#: if timestamp of template object is different from file, clear it
- #cache._delete(cachepath)
- if logger: logger.info("[tenjin.%s] cache expired (filepath=%r)" % \
- (self.__class__.__name__, filepath))
+ # cache._delete(cachepath)
+ if logger:
+ logger.info("[tenjin.%s] cache expired (filepath=%r)" %
+ (self.__class__.__name__, filepath))
return None
def get_template(self, template_name, _context=None, _globals=None):
@@ -1868,7 +2019,8 @@ class Engine(object):
#: if template file is not found then raise TemplateNotFoundError.
filepath = self.loader.find(filename, self.path)
if not filepath:
- raise TemplateNotFoundError('%s: filename not found (path=%r).' % (filename, self.path))
+ raise TemplateNotFoundError(
+ '%s: filename not found (path=%r).' % (filename, self.path))
#
fullpath = self.loader.abspath(filepath)
self._filepaths[filename] = (filepath, fullpath)
@@ -1876,19 +2028,24 @@ class Engine(object):
cachepath = self.cachename(fullpath)
#: get template object from cache
cache = self.cache
- template = cache and self._get_template_from_cache(cachepath, filepath) or None
+ template = cache and self._get_template_from_cache(
+ cachepath, filepath) or None
#: if template object is not found in cache or is expired...
if not template:
ret = self.loader.load(filepath)
if not ret:
- raise TemplateNotFoundError("%r: template not found." % filepath)
+ raise TemplateNotFoundError(
+ "%r: template not found." % filepath)
input, timestamp = ret
- if self.pp: ## required for preprocessing
- if _context is None: _context = {}
- if _globals is None: _globals = sys._getframe(1).f_globals
+ if self.pp: # required for preprocessing
+ if _context is None:
+ _context = {}
+ if _globals is None:
+ _globals = sys._getframe(1).f_globals
input = self._preprocess(input, filepath, _context, _globals)
#: create template object.
- template = self._create_template(input, filepath, _context, _globals)
+ template = self._create_template(
+ input, filepath, _context, _globals)
#: set timestamp and filename of template object.
template.timestamp = timestamp
template._last_checked_at = _time()
@@ -1896,10 +2053,12 @@ class Engine(object):
if cache:
if not template.bytecode:
#: ignores syntax error when compiling.
- try: template.compile()
- except SyntaxError: pass
+ try:
+ template.compile()
+ except SyntaxError:
+ pass
cache.set(cachepath, template)
- #else:
+ # else:
# template.compile()
#:
template.filename = filepath
@@ -1921,7 +2080,7 @@ class Engine(object):
"""
#: get local and global vars of caller.
frame = sys._getframe(1)
- locals = frame.f_locals
+ locals = frame.f_locals
globals = frame.f_globals
#: get _context from caller's local vars.
assert '_context' in locals
@@ -1930,12 +2089,14 @@ class Engine(object):
if kwargs:
context.update(kwargs)
#: get template object with context data and global vars.
- ## (context and globals are passed to get_template() only for preprocessing.)
+ # (context and globals are passed to get_template() only for preprocessing.)
template = self.get_template(template_name, context, globals)
#: if append_to_buf is true then add output to _buf.
#: if append_to_buf is false then don't add output to _buf.
- if append_to_buf: _buf = locals['_buf']
- else: _buf = None
+ if append_to_buf:
+ _buf = locals['_buf']
+ else:
+ _buf = None
#: render template and return output.
s = template.render(context, globals, _buf=_buf)
#: kwargs are removed from context data.
@@ -1967,10 +2128,10 @@ class Engine(object):
globals = sys._getframe(1).f_globals
self.hook_context(context)
while True:
- ## context and globals are passed to get_template() only for preprocessing
+ # context and globals are passed to get_template() only for preprocessing
template = self.get_template(template_name, context, globals)
- content = template.render(context, globals)
- layout = context.pop('_layout', layout)
+ content = template.render(context, globals)
+ layout = context.pop('_layout', layout)
if layout is True or layout is None:
layout = self.layout
if not layout:
@@ -1990,7 +2151,7 @@ class Engine(object):
##
-## safe template and engine
+# safe template and engine
##
class SafeTemplate(Template):
@@ -1998,7 +2159,7 @@ class SafeTemplate(Template):
'#{...}' is not allowed with this class. Use '[==...==]' instead.
"""
- tostrfunc = 'to_str'
+ tostrfunc = 'to_str'
escapefunc = 'to_escaped'
def get_expr_and_flags(self, match):
@@ -2007,7 +2168,7 @@ class SafeTemplate(Template):
class SafePreprocessor(Preprocessor):
- tostrfunc = 'to_str'
+ tostrfunc = 'to_str'
escapefunc = 'to_escaped'
def get_expr_and_flags(self, match):
@@ -2018,20 +2179,23 @@ def _get_expr_and_flags(match, errmsg):
expr1, expr2, expr3, expr4 = match.groups()
if expr1 is not None:
raise TemplateSyntaxError(errmsg % match.group(1))
- if expr2 is not None: return expr2, (True, False) # #{...} : call escape, not to_str
- if expr3 is not None: return expr3, (False, True) # [==...==] : not escape, call to_str
- if expr4 is not None: return expr4, (True, False) # [=...=] : call escape, not to_str
+ if expr2 is not None:
+ return expr2, (True, False) # #{...} : call escape, not to_str
+ if expr3 is not None:
+ return expr3, (False, True) # [==...==] : not escape, call to_str
+ if expr4 is not None:
+ return expr4, (True, False) # [=...=] : call escape, not to_str
class SafeEngine(Engine):
- templateclass = SafeTemplate
+ templateclass = SafeTemplate
preprocessorclass = SafePreprocessor
##
-## for Google App Engine
-## (should separate into individual file or module?)
+# for Google App Engine
+# (should separate into individual file or module?)
##
def _dummy():
@@ -2045,44 +2209,55 @@ def _dummy():
def __init__(self, lifetime=None, namespace=None):
CacheStorage.__init__(self)
- if lifetime is not None: self.lifetime = lifetime
+ if lifetime is not None:
+ self.lifetime = lifetime
self.namespace = namespace
def _load(self, cachepath):
key = cachepath
- if _tenjin.logger: _tenjin.logger.info("[tenjin.gae.GaeMemcacheCacheStorage] load cache (key=%r)" % (key, ))
+ if _tenjin.logger:
+ _tenjin.logger.info(
+ "[tenjin.gae.GaeMemcacheCacheStorage] load cache (key=%r)" % (key, ))
return memcache.get(key, namespace=self.namespace)
def _store(self, cachepath, dct):
dct.pop('bytecode', None)
key = cachepath
- if _tenjin.logger: _tenjin.logger.info("[tenjin.gae.GaeMemcacheCacheStorage] store cache (key=%r)" % (key, ))
- ret = memcache.set(key, dct, self.lifetime, namespace=self.namespace)
+ if _tenjin.logger:
+ _tenjin.logger.info(
+ "[tenjin.gae.GaeMemcacheCacheStorage] store cache (key=%r)" % (key, ))
+ ret = memcache.set(key, dct, self.lifetime,
+ namespace=self.namespace)
if not ret:
- if _tenjin.logger: _tenjin.logger.info("[tenjin.gae.GaeMemcacheCacheStorage] failed to store cache (key=%r)" % (key, ))
+ if _tenjin.logger:
+ _tenjin.logger.info(
+ "[tenjin.gae.GaeMemcacheCacheStorage] failed to store cache (key=%r)" % (key, ))
def _delete(self, cachepath):
key = cachepath
memcache.delete(key, namespace=self.namespace)
-
class GaeMemcacheStore(KeyValueStore):
lifetime = 0
def __init__(self, lifetime=None, namespace=None):
- if lifetime is not None: self.lifetime = lifetime
+ if lifetime is not None:
+ self.lifetime = lifetime
self.namespace = namespace
def get(self, key):
return memcache.get(key, namespace=self.namespace)
def set(self, key, value, lifetime=None):
- if lifetime is None: lifetime = self.lifetime
+ if lifetime is None:
+ lifetime = self.lifetime
if memcache.set(key, value, lifetime, namespace=self.namespace):
return True
else:
- if _tenjin.logger: _tenjin.logger.info("[tenjin.gae.GaeMemcacheStore] failed to set (key=%r)" % (key, ))
+ if _tenjin.logger:
+ _tenjin.logger.info(
+ "[tenjin.gae.GaeMemcacheStore] failed to set (key=%r)" % (key, ))
return False
def delete(self, key):
@@ -2095,19 +2270,19 @@ def _dummy():
else:
return True
-
def init():
global memcache, _tenjin
if not memcache:
from google.appengine.api import memcache
- if not _tenjin: import tenjin as _tenjin
- ## avoid cache confliction between versions
- ver = os.environ.get('CURRENT_VERSION_ID', '1.1')#.split('.')[0]
+ if not _tenjin:
+ import tenjin as _tenjin
+ # avoid cache confliction between versions
+ ver = os.environ.get('CURRENT_VERSION_ID', '1.1') # .split('.')[0]
Engine.cache = GaeMemcacheCacheStorage(namespace=ver)
- ## set fragment cache store
- helpers.fragment_cache.store = GaeMemcacheStore(namespace=ver)
- helpers.fragment_cache.lifetime = 60 # 1 minute
- helpers.fragment_cache.prefix = 'fragment.'
+ # set fragment cache store
+ helpers.fragment_cache.store = GaeMemcacheStore(namespace=ver)
+ helpers.fragment_cache.lifetime = 60 # 1 minute
+ helpers.fragment_cache.prefix = 'fragment.'
gae = create_module('tenjin.gae', _dummy,
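
The tenjin.py hunks above only re-wrap and re-indent code, so template rendering should be unchanged by them. A minimal smoke test against the bundled tenjin module (the template string and context are made up for illustration; this mirrors standard pyTenjin usage rather than weabot's own renderTemplate wrapper):

    import tenjin
    from tenjin.helpers import *   # render() picks up to_str/escape from the caller's globals

    t = tenjin.Template(input="<p>${title}</p> #{count}\n")
    print(t.render({'title': 'a < b', 'count': 7}))
    # ${...} is escaped, #{...} only goes through to_str:
    #   <p>a &lt; b</p> 7
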
diff --git a/cgi/weabot.py b/cgi/weabot.py
index 398ebfa..1406707 100755
--- a/cgi/weabot.py
+++ b/cgi/weabot.py
@@ -30,995 +30,1057 @@ _DEBUG = False
# Set to True to save performance data to weabot.txt
_LOG = False
+
class weabot(object):
- def __init__(self, environ, start_response):
- global _DEBUG
- self.environ = environ
- if self.environ["PATH_INFO"].startswith("/weabot.py/"):
- self.environ["PATH_INFO"] = self.environ["PATH_INFO"][11:]
-
- self.start = start_response
- self.formdata = getFormData(self)
- self.output = ""
-
- self.handleRequest()
-
- # Localization Code
- lang = gettext.translation('weabot', './locale', languages=[Settings.LANG])
- lang.install()
-
- logTime("**Start**")
- if _DEBUG:
- import cProfile
-
- prof = cProfile.Profile()
- prof.runcall(self.run)
- prof.dump_stats('stats.prof')
- else:
- try:
- self.run()
- except UserError, message:
- self.error(message)
- except Exception, inst:
- import sys, traceback
- exc_type, exc_value, exc_traceback = sys.exc_info()
- detail = ((os.path.basename(o[0]),o[1],o[2],o[3]) for o in traceback.extract_tb(exc_traceback))
- self.exception(type(inst), inst, detail)
-
- # close database and finish
- CloseDb()
- logTime("**End**")
-
- if _LOG:
- logfile = open(Settings.ROOT_DIR + "weabot.txt", "w")
- logfile.write(logTimes())
- logfile.close()
-
- def __iter__(self):
- self.handleResponse()
- self.start("200 OK", self.headers)
- yield self.output
-
- def error(self, message):
- board = Settings._.BOARD
- if board:
- if board['board_type'] == '1':
- info = {}
- info['host'] = self.environ["REMOTE_ADDR"]
- info['name'] = self.formdata.get('fielda', '')
- info['email'] = self.formdata.get('fieldb', '')
- info['message'] = self.formdata.get('message', '')
-
- self.output += renderTemplate("txt_error.html", {"info": info, "error": message})
- else:
- mobile = self.formdata.get('mobile', '')
- if mobile:
- self.output += renderTemplate("mobile/error.html", {"error": message})
+ def __init__(self, environ, start_response):
+ global _DEBUG
+ self.environ = environ
+ if self.environ["PATH_INFO"].startswith("/weabot.py/"):
+ self.environ["PATH_INFO"] = self.environ["PATH_INFO"][11:]
+
+ self.start = start_response
+ self.formdata = getFormData(self)
+ self.output = ""
+
+ self.handleRequest()
+
+ # Localization Code
+ lang = gettext.translation(
+ 'weabot', './locale', languages=[Settings.LANG])
+ lang.install()
+
+ logTime("**Start**")
+ if _DEBUG:
+ import cProfile
+
+ prof = cProfile.Profile()
+ prof.runcall(self.run)
+ prof.dump_stats('stats.prof')
else:
- self.output += renderTemplate("error.html", {"error": message, "boards_url": Settings.BOARDS_URL, "board": board["dir"]})
- else:
- self.output += renderTemplate("exception.html", {"exception": None, "error": message})
-
- def exception(self, type, message, detail):
- self.output += renderTemplate("exception.html", {"exception": type, "error": message, "detail": detail})
-
- def handleRequest(self):
- self.headers = [("Content-Type", "text/html")]
- self.handleCookies()
-
- def handleResponse(self):
- if self._newcookies:
- for newcookie in self._newcookies.values():
- self.headers.append(("Set-Cookie", newcookie.output(header="")))
-
- def handleCookies(self):
- self._cookies = SimpleCookie()
- self._cookies.load(self.environ.get("HTTP_COOKIE", ""))
- self._newcookies = None
-
- def run(self):
- path_split = self.environ["PATH_INFO"].split("/")
- caught = False
-
- if Settings.FULL_MAINTENANCE:
- raise UserError, _("%s is currently under maintenance. We'll be back.") % Settings.SITE_TITLE
-
- if len(path_split) > 1:
- if path_split[1] == "post":
- # Making a post
- caught = True
-
- if 'password' not in self.formdata:
- raise UserError, "El request está incompleto."
-
- # let's get all the POST data we need
- ip = self.environ["REMOTE_ADDR"]
- boarddir = self.formdata.get('board')
- parent = self.formdata.get('parent')
- trap1 = self.formdata.get('name', '')
- trap2 = self.formdata.get('email', '')
- name = self.formdata.get('fielda', '')
- email = self.formdata.get('fieldb', '')
- subject = self.formdata.get('subject', '')
- message = self.formdata.get('message', '')
- file = self.formdata.get('file')
- file_original = self.formdata.get('file_original')
- spoil = self.formdata.get('spoil')
- oek_file = self.formdata.get('oek_file')
- password = self.formdata.get('password', '')
- noimage = self.formdata.get('noimage')
- mobile = ("mobile" in self.formdata.keys())
-
- # call post function
- (post_url, ttaken, unused) = self.make_post(ip, boarddir, parent, trap1, trap2, name, email, subject, message, file, file_original, spoil, oek_file, password, noimage, mobile)
-
- # make redirect
- self.output += make_redirect(post_url, ttaken)
- elif path_split[1] == "environ":
- caught = True
-
- self.output += repr(self.environ)
- elif path_split[1] == "delete":
- # Deleting a post
- caught = True
-
- boarddir = self.formdata.get('board')
- postid = self.formdata.get('delete')
- imageonly = self.formdata.get('imageonly')
- password = self.formdata.get('password')
- mobile = self.formdata.get('mobile')
-
- # call delete function
- self.delete_post(boarddir, postid, imageonly, password, mobile)
- elif path_split[1] == "anarkia":
- import anarkia
- caught = True
- OpenDb()
- anarkia.anarkia(self, path_split)
- elif path_split[1] == "manage":
- caught = True
- OpenDb()
- manage.manage(self, path_split)
- elif path_split[1] == "api":
- import api
- caught = True
- self.headers = [("Content-Type", "application/json"), ("Access-Control-Allow-Origin", "*"), ("Access-Control-Allow-Methods", "PUT, GET, POST, DELETE, OPTIONS"), ("Access-Control-Allow-Headers", "Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With")]
- OpenDb()
- api.api(self, path_split)
- elif path_split[1] == "threadlist":
- OpenDb()
- board = setBoard(path_split[2])
- caught = True
- if board['board_type'] != '1':
- raise UserError, "No disponible para esta sección."
- self.output = threadList(0)
- elif path_split[1] == "mobile":
- OpenDb()
- board = setBoard(path_split[2])
- caught = True
- self.output = threadList(1)
- elif path_split[1] == "mobilelist":
- OpenDb()
- board = setBoard(path_split[2])
- caught = True
- self.output = threadList(2)
- elif path_split[1] == "mobilecat":
- OpenDb()
- board = setBoard(path_split[2])
- caught = True
- self.output = threadList(3)
- elif path_split[1] == "mobilenew":
- OpenDb()
- board = setBoard(path_split[2])
- caught = True
- self.output = renderTemplate('txt_newthread.html', {}, True)
- elif path_split[1] == "mobilehome":
- OpenDb()
- latest_age = getLastAge(Settings.HOME_LASTPOSTS)
- for threads in latest_age:
- content = threads['url']
- content = content.replace('/read/', '/')
- content = content.replace('/res/', '/')
- content = content.replace('.html', '')
- threads['url'] = content
- caught = True
- self.output = renderTemplate('latest.html', {'latest_age': latest_age}, True)
- elif path_split[1] == "mobilenewest":
- OpenDb()
- newthreads = getNewThreads(Settings.HOME_LASTPOSTS)
- for threads in newthreads:
- content = threads['url']
- content = content.replace('/read/', '/')
- content = content.replace('/res/', '/')
- content = content.replace('.html', '')
- threads['url'] = content
- caught = True
- self.output = renderTemplate('newest.html', {'newthreads': newthreads}, True)
- elif path_split[1] == "mobileread":
- OpenDb()
- board = setBoard(path_split[2])
- caught = True
- if len(path_split) > 4 and path_split[4] and board['board_type'] == '1':
- #try:
- self.output = dynamicRead(int(path_split[3]), path_split[4], True)
- #except:
- # self.output = threadPage(path_split[3], True)
- elif board['board_type'] == '1':
- self.output = threadPage(0, True, path_split[3])
+ try:
+ self.run()
+ except UserError, message:
+ self.error(message)
+ except Exception, inst:
+ import sys
+ import traceback
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ detail = ((os.path.basename(o[0]), o[1], o[2], o[3])
+ for o in traceback.extract_tb(exc_traceback))
+ self.exception(type(inst), inst, detail)
+
+ # close database and finish
+ CloseDb()
+ logTime("**End**")
+
+ if _LOG:
+ logfile = open(Settings.ROOT_DIR + "weabot.txt", "w")
+ logfile.write(logTimes())
+ logfile.close()
+
+ def __iter__(self):
+ self.handleResponse()
+ self.start("200 OK", self.headers)
+ yield self.output
+
+ def error(self, message):
+ board = Settings._.BOARD
+ if board:
+ if board['board_type'] == '1':
+ info = {}
+ info['host'] = self.environ["REMOTE_ADDR"]
+ info['name'] = self.formdata.get('fielda', '')
+ info['email'] = self.formdata.get('fieldb', '')
+ info['message'] = self.formdata.get('message', '')
+
+ self.output += renderTemplate("txt_error.html",
+ {"info": info, "error": message})
+ else:
+ mobile = self.formdata.get('mobile', '')
+ if mobile:
+ self.output += renderTemplate("mobile/error.html",
+ {"error": message})
+ else:
+ self.output += renderTemplate("error.html", {
+ "error": message, "boards_url": Settings.BOARDS_URL, "board": board["dir"]})
else:
- self.output = threadPage(path_split[3], True)
- elif path_split[1] == "catalog":
- OpenDb()
- board = setBoard(path_split[2])
- caught = True
- sort = self.formdata.get('sort', '')
- self.output = catalog(sort)
- elif path_split[1] == "oekaki":
- caught = True
- OpenDb()
- oekaki.oekaki(self, path_split)
- elif path_split[1] == "play":
- # Module player
- caught = True
- boarddir = path_split[2]
- modfile = path_split[3]
- self.output = renderTemplate('mod.html', {'board': boarddir, 'modfile': modfile})
- elif path_split[1] == "report":
- # Report post, check if they are enabled
- # Can't report if banned
- caught = True
- ip = self.environ["REMOTE_ADDR"]
- boarddir = path_split[2]
- postid = int(path_split[3])
- reason = self.formdata.get('reason')
- try:
- txt = True
- postshow = int(path_split[4])
- except:
- txt = False
- postshow = postid
-
- self.report(ip, boarddir, postid, reason, txt, postshow)
- elif path_split[1] == "stats":
- caught = True
- self.stats()
- elif path_split[1] == "random":
- caught = True
+ self.output += renderTemplate("exception.html",
+ {"exception": None, "error": message})
+
+ def exception(self, type, message, detail):
+ self.output += renderTemplate("exception.html",
+ {"exception": type, "error": message, "detail": detail})
+
+ def handleRequest(self):
+ self.headers = [("Content-Type", "text/html")]
+ self.handleCookies()
+
+ def handleResponse(self):
+ if self._newcookies:
+ for newcookie in self._newcookies.values():
+ self.headers.append(
+ ("Set-Cookie", newcookie.output(header="")))
+
+ def handleCookies(self):
+ self._cookies = SimpleCookie()
+ self._cookies.load(self.environ.get("HTTP_COOKIE", ""))
+ self._newcookies = None
+
+ def run(self):
+ path_split = self.environ["PATH_INFO"].split("/")
+ caught = False
+
+ if Settings.FULL_MAINTENANCE:
+ raise UserError, _(
+ "%s is currently under maintenance. We'll be back.") % Settings.SITE_TITLE
+
+ if len(path_split) > 1:
+ if path_split[1] == "post":
+ # Making a post
+ caught = True
+
+ if 'password' not in self.formdata:
+ raise UserError, "El request está incompleto."
+
+ # let's get all the POST data we need
+ ip = self.environ["REMOTE_ADDR"]
+ boarddir = self.formdata.get('board')
+ parent = self.formdata.get('parent')
+ trap1 = self.formdata.get('name', '')
+ trap2 = self.formdata.get('email', '')
+ name = self.formdata.get('fielda', '')
+ email = self.formdata.get('fieldb', '')
+ subject = self.formdata.get('subject', '')
+ message = self.formdata.get('message', '')
+ file = self.formdata.get('file')
+ file_original = self.formdata.get('file_original')
+ spoil = self.formdata.get('spoil')
+ oek_file = self.formdata.get('oek_file')
+ password = self.formdata.get('password', '')
+ noimage = self.formdata.get('noimage')
+ mobile = ("mobile" in self.formdata.keys())
+
+ # call post function
+ (post_url, ttaken, unused) = self.make_post(ip, boarddir, parent, trap1, trap2, name,
+ email, subject, message, file, file_original, spoil, oek_file, password, noimage, mobile)
+
+ # make redirect
+ self.output += make_redirect(post_url, ttaken)
+ elif path_split[1] == "environ":
+ caught = True
+
+ self.output += repr(self.environ)
+ elif path_split[1] == "delete":
+ # Deleting a post
+ caught = True
+
+ boarddir = self.formdata.get('board')
+ postid = self.formdata.get('delete')
+ imageonly = self.formdata.get('imageonly')
+ password = self.formdata.get('password')
+ mobile = self.formdata.get('mobile')
+
+ # call delete function
+ self.delete_post(boarddir, postid, imageonly, password, mobile)
+ elif path_split[1] == "anarkia":
+ import anarkia
+ caught = True
+ OpenDb()
+ anarkia.anarkia(self, path_split)
+ elif path_split[1] == "manage":
+ caught = True
+ OpenDb()
+ manage.manage(self, path_split)
+ elif path_split[1] == "api":
+ import api
+ caught = True
+ self.headers = [("Content-Type", "application/json"), ("Access-Control-Allow-Origin", "*"), ("Access-Control-Allow-Methods",
+ "PUT, GET, POST, DELETE, OPTIONS"), ("Access-Control-Allow-Headers", "Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With")]
+ OpenDb()
+ api.api(self, path_split)
+ elif path_split[1] == "threadlist":
+ OpenDb()
+ board = setBoard(path_split[2])
+ caught = True
+ if board['board_type'] != '1':
+ raise UserError, "No disponible para esta sección."
+ self.output = threadList(0)
+ elif path_split[1] == "mobile":
+ OpenDb()
+ board = setBoard(path_split[2])
+ caught = True
+ self.output = threadList(1)
+ elif path_split[1] == "mobilelist":
+ OpenDb()
+ board = setBoard(path_split[2])
+ caught = True
+ self.output = threadList(2)
+ elif path_split[1] == "mobilecat":
+ OpenDb()
+ board = setBoard(path_split[2])
+ caught = True
+ self.output = threadList(3)
+ elif path_split[1] == "mobilenew":
+ OpenDb()
+ board = setBoard(path_split[2])
+ caught = True
+ self.output = renderTemplate('txt_newthread.html', {}, True)
+ elif path_split[1] == "mobilehome":
+ OpenDb()
+ latest_age = getLastAge(Settings.HOME_LASTPOSTS)
+ for threads in latest_age:
+ content = threads['url']
+ content = content.replace('/read/', '/')
+ content = content.replace('/res/', '/')
+ content = content.replace('.html', '')
+ threads['url'] = content
+ caught = True
+ self.output = renderTemplate(
+ 'latest.html', {'latest_age': latest_age}, True)
+ elif path_split[1] == "mobilenewest":
+ OpenDb()
+ newthreads = getNewThreads(Settings.HOME_LASTPOSTS)
+ for threads in newthreads:
+ content = threads['url']
+ content = content.replace('/read/', '/')
+ content = content.replace('/res/', '/')
+ content = content.replace('.html', '')
+ threads['url'] = content
+ caught = True
+ self.output = renderTemplate(
+ 'newest.html', {'newthreads': newthreads}, True)
+ elif path_split[1] == "mobileread":
+ OpenDb()
+ board = setBoard(path_split[2])
+ caught = True
+ if len(path_split) > 4 and path_split[4] and board['board_type'] == '1':
+ # try:
+ self.output = dynamicRead(
+ int(path_split[3]), path_split[4], True)
+ # except:
+ # self.output = threadPage(path_split[3], True)
+ elif board['board_type'] == '1':
+ self.output = threadPage(0, True, path_split[3])
+ else:
+ self.output = threadPage(path_split[3], True)
+ elif path_split[1] == "catalog":
+ OpenDb()
+ board = setBoard(path_split[2])
+ caught = True
+ sort = self.formdata.get('sort', '')
+ self.output = catalog(sort)
+ elif path_split[1] == "oekaki":
+ caught = True
+ OpenDb()
+ oekaki.oekaki(self, path_split)
+ elif path_split[1] == "play":
+ # Module player
+ caught = True
+ boarddir = path_split[2]
+ modfile = path_split[3]
+ self.output = renderTemplate(
+ 'mod.html', {'board': boarddir, 'modfile': modfile})
+ elif path_split[1] == "report":
+ # Report post, check if they are enabled
+ # Can't report if banned
+ caught = True
+ ip = self.environ["REMOTE_ADDR"]
+ boarddir = path_split[2]
+ postid = int(path_split[3])
+ reason = self.formdata.get('reason')
+ try:
+ txt = True
+ postshow = int(path_split[4])
+ except:
+ txt = False
+ postshow = postid
+
+ self.report(ip, boarddir, postid, reason, txt, postshow)
+ elif path_split[1] == "stats":
+ caught = True
+ self.stats()
+ elif path_split[1] == "random":
+ caught = True
+ OpenDb()
+ board = FetchOne(
+ "SELECT `id`, `dir`, `board_type` FROM `boards` WHERE `secret` = 0 AND `id` <> 1 AND `id` <> 13 AND `id` <> 34 ORDER BY RAND() LIMIT 1")
+ thread = FetchOne(
+ "SELECT `id`, `timestamp` FROM `posts` WHERE `parentid` = 0 AND `boardid` = %s ORDER BY RAND() LIMIT 1" % board['id'])
+ if board['board_type'] == '1':
+ url = Settings.HOME_URL + \
+ board['dir'] + '/read/' + thread['timestamp'] + '/'
+ else:
+ url = Settings.HOME_URL + \
+ board['dir'] + '/res/' + thread['id'] + '.html'
+ self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><meta http-equiv="refresh" content="0;url=%s" /><body><p>...</p></body></html>' % url
+ elif path_split[1] == "nostalgia":
+ caught = True
+ OpenDb()
+ thread = FetchOne(
+ "SELECT `timestamp` FROM `archive` WHERE `boardid` = 9 AND `timestamp` < 1462937230 ORDER BY RAND() LIMIT 1")
+ url = Settings.HOME_URL + '/zonavip/read/' + \
+ thread['timestamp'] + '/'
+ self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><meta http-equiv="refresh" content="0;url=%s" /><body><p>...</p></body></html>' % url
+ elif path_split[1] == "banned":
+ OpenDb()
+ packed_ip = inet_aton(self.environ["REMOTE_ADDR"])
+ bans = FetchAll("SELECT * FROM `bans` WHERE (`netmask` IS NULL AND `ip` = '"+str(
+ packed_ip)+"') OR (`netmask` IS NOT NULL AND '"+str(packed_ip)+"' & `netmask` = `ip`)")
+ if bans:
+ for ban in bans:
+ if ban["boards"] != "":
+ boards = pickle.loads(ban["boards"])
+ if ban["boards"] == "" or path_split[2] in boards:
+ caught = True
+ if ban["boards"]:
+ boards_str = '/' + '/, /'.join(boards) + '/'
+ else:
+ boards_str = _("all boards")
+ if ban["until"] != "0":
+ expire = formatTimestamp(ban["until"])
+ else:
+ expire = ""
+
+ template_values = {
+ # 'return_board': path_split[2],
+ 'boards_str': boards_str,
+ 'reason': ban['reason'],
+ 'added': formatTimestamp(ban["added"]),
+ 'expire': expire,
+ 'ip': self.environ["REMOTE_ADDR"],
+ }
+ self.output = renderTemplate(
+ 'banned.html', template_values)
+ else:
+ if len(path_split) > 2:
+ caught = True
+ self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><body><meta http-equiv="refresh" content="0;url=%s" /><p>%s</p></body></html>' % (
+ Settings.HOME_URL + path_split[2], _("Your ban has expired. Redirecting..."))
+ elif path_split[1] == "read":
+ # Textboard read:
+ if len(path_split) > 4:
+ caught = True
+ # 2: board
+ # 3: thread
+ # 4: post(s)
+ OpenDb()
+ board = setBoard(path_split[2])
+ self.output = dynamicRead(
+ int(path_split[3]), path_split[4])
+ elif path_split[1] == "preview":
+ caught = True
+ OpenDb()
+ try:
+ board = setBoard(self.formdata["board"])
+ message = format_post(
+ self.formdata["message"], self.environ["REMOTE_ADDR"], self.formdata["parentid"])
+ self.output = message
+ except Exception, messagez:
+ self.output = "Error: " + \
+ str(messagez) + " : " + str(self.formdata)
+ if not caught:
+ # Redirect the user back to the front page
+ self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><body><meta http-equiv="refresh" content="0;url=%s" /><p>--&gt; --&gt; --&gt;</p></body></html>' % Settings.HOME_URL
+
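+ # Illustrative sketch (stdlib only, hypothetical name, not referenced anywhere
+ # else): the `bans` query in the "banned" branch above accepts a row when the
+ # packed IP matches exactly, or when `packed_ip & netmask = ip`. The same test
+ # in plain Python for IPv4 addresses looks roughly like this:
+ def _ban_match_sketch(addr, ban_ip, ban_netmask=None):
+     import socket
+     import struct
+     packed = struct.unpack("!L", socket.inet_aton(addr))[0]
+     base = struct.unpack("!L", socket.inet_aton(ban_ip))[0]
+     if ban_netmask is None:
+         # no netmask stored: exact match ("netmask IS NULL AND ip = ...")
+         return packed == base
+     mask = struct.unpack("!L", socket.inet_aton(ban_netmask))[0]
+     # range ban: same idea as the SQL "packed_ip & netmask = ip"
+     return (packed & mask) == base
+ # e.g. _ban_match_sketch("10.1.2.3", "10.1.0.0", "255.255.0.0") -> True
+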
+ def make_post(self, ip, boarddir, parent, trap1, trap2, name, email, subject, message, file, file_original, spoil, oek_file, password, noimage, mobile):
+ _STARTTIME = time.clock()  # comment this out when not debugging
+
+ if addressIsUS(ip):
+ raise UserError, "Host en lista negra."
+
+ # open database
OpenDb()
- board = FetchOne("SELECT `id`, `dir`, `board_type` FROM `boards` WHERE `secret` = 0 AND `id` <> 1 AND `id` <> 13 AND `id` <> 34 ORDER BY RAND() LIMIT 1")
- thread = FetchOne("SELECT `id`, `timestamp` FROM `posts` WHERE `parentid` = 0 AND `boardid` = %s ORDER BY RAND() LIMIT 1" % board['id'])
+
+ # set the board
+ board = setBoard(boarddir)
+
+ if board["dir"] != "anarkia":
+ if addressIsProxy(ip):
+ raise UserError, "Proxy prohibido en esta sección."
+
+ # check length of fields
+ if len(name) > 50:
+ raise UserError, "El campo de nombre es muy largo."
+ if len(email) > 50:
+ raise UserError, "El campo de e-mail es muy largo."
+ if len(subject) > 100:
+ raise UserError, "El campo de asunto es muy largo."
+ if len(message) > 8000:
+ raise UserError, "El campo de mensaje es muy largo."
+ if message.count('\n') > 50:
+ raise UserError, "El mensaje tiene muchos saltos de línea."
+
+ # anti-spam trap
+ if trap1 or trap2:
+ raise UserError, "Te quedan tres días de vida."
+
+ # Take a single timestamp now so everything stays in sync
+ t = time.time()
+
+ # Delete expired bans
+ deletedBans = UpdateDb(
+ "DELETE FROM `bans` WHERE `until` != 0 AND `until` < " + str(timestamp()))
+ if deletedBans > 0:
+ regenerateAccess()
+
+ # Redirect to ban page if user is banned
+ if addressIsBanned(ip, board["dir"]):
+ #raise UserError, 'Tu host está en la lista negra.'
+ raise UserError, '<meta http-equiv="refresh" content="0; url=/cgi/banned/%s">' % board["dir"]
+
+ # Disallow posting if the site OR board is in maintenance
+ if Settings.MAINTENANCE:
+ raise UserError, _(
+ "%s is currently under maintenance. We'll be back.") % Settings.SITE_TITLE
+ if board["locked"] == '1':
+ raise UserError, _("This board is closed. You can't post in it.")
+
+ # create post object
+ post = Post(board["id"])
+ post["ip"] = inet_aton(ip)
+ post["timestamp"] = post["bumped"] = int(t)
+ post["timestamp_formatted"] = formatTimestamp(t)
+
+ # load parent info if we are replying
+ parent_post = None
+ parent_timestamp = post["timestamp"]
+ if parent:
+ parent_post = get_parent_post(parent, board["id"])
+ parent_timestamp = parent_post['timestamp']
+ post["parentid"] = parent_post['id']
+ post["bumped"] = parent_post['bumped']
+ if parent_post['locked'] == '1':
+ raise UserError, _(
+ "The thread is closed. You can't post in it.")
+
+ # check if the user is flooding
+ flood_check(t, post, board["id"])
+
+ # use fields only if enabled
+ if board["disable_name"] != '1':
+ post["name"] = cleanString(name)
+ post["email"] = cleanString(email, quote=True)
+ if board["disable_subject"] != '1':
+ post["subject"] = cleanString(subject)
+
+ # process tripcodes
+ post["name"], post["tripcode"] = tripcode(post["name"])
+
+ # Remove carriage returns; they're useless
+ message = message.replace("\r", "")
+
+ # check for ! commands (!extend, !dado, !bola8) before formatting
+ extend = extend_str = dice = ball = None
+
+ if not post["parentid"] and board["dir"] not in ['bai', 'world']:
+ # creating thread
+ __extend = re.compile(r"^!extend(:\w+)(:\w+)?\n")
+ res = __extend.match(message)
+ if res:
+ extend = res.groups()
+ # truncate extend
+ extend_str = res.group(0)
+ message = message[res.end(0):]
+
+ if board["dir"] in ['juegos', '0', 'polka']:
+ __dice = re.compile(r"^!dado(:\w+)(:\w+)?\n")
+ res = __dice.match(message)
+ if res:
+ dice = res.groups()
+ message = message[res.end(0):]
+
+ if board["dir"] in ['zonavip', '0', 'polka']:
+ __ball = re.compile(r"^!bola8\n")
+ res = __ball.match(message)
+ if res:
+ ball = True
+ message = message[res.end(0):]
+
+ # use and format message
+ if message.strip():
+ post["message"] = format_post(
+ message, ip, post["parentid"], parent_timestamp)
+
+ # add function messages
+ if extend_str:
+ extend_str = extend_str.replace('!extend', 'EXTEND')
+ post["message"] += '<hr />' + extend_str + ' configurado.'
+ if dice:
+ post["message"] += '<hr />' + throw_dice(dice)
+ if ball:
+ post["message"] += '<hr />' + magic_ball()
+
+ if not post["parentid"] and post["email"].lower() == 'sage':
+ post["email"] = ""
+
+ # disallow illegal characters
+ if post["name"]:
+ post["name"] = post["name"].replace('★', '☆')
+ post["name"] = post["name"].replace('◆', '◇')
+
+ # process capcodes
+ cap_id = hide_end = None
+ if post["name"] in Settings.CAPCODES:
+ capcode = Settings.CAPCODES[post["name"]]
+ if post["tripcode"] == (Settings.TRIP_CHAR + capcode[0]):
+ post["name"], post["tripcode"] = capcode[1], capcode[2]
+ # if board['board_type'] == '1':
+ # post["name"], post["tripcode"] = capcode[1], ''
+ # else:
+ # post["name"] = post["tripcode"] = ''
+ # post["message"] = ('[<span style="color:red">%s</span>]<br />' % capcode[2]) + post["message"]
+
+ cap_id, hide_end = capcode[3], capcode[4]
+
+ # hide ip if necessary
+ if hide_end:
+ post["ip"] = 0
+
+ # use password
+ post["password"] = password
+
+ # EXTEND feature
+ if post["parentid"] and board["dir"] not in ['bai', 'world']:
+ # replying
+ __extend = re.compile(r"<hr />EXTEND(:\w+)(:\w+)?\b")
+ res = __extend.search(parent_post["message"])
+ if res:
+ extend = res.groups()
+
+ # compatibility: old ID function
+ if 'id' in parent_post["email"]:
+ board["useid"] = '3'
+
+ if 'id' in post["email"]:
+ board["useid"] = '3'
+
+ if extend:
+ try:
+ # 1: ID
+ if extend[0] == ':no':
+ board["useid"] = '0'
+ elif extend[0] == ':yes':
+ board["useid"] = '1'
+ elif extend[0] == ':force':
+ board["useid"] = '2'
+ elif extend[0] == ':extra':
+ board["useid"] = '3'
+
+ # 2: Slip
+ if extend[1] == ':no':
+ board["slip"] = '0'
+ elif extend[1] == ':yes':
+ board["slip"] = '1'
+ elif extend[1] == ':domain':
+ board["slip"] = '2'
+ elif extend[1] == ':verbose':
+ board["slip"] = '3'
+ elif extend[1] == ':country':
+ board["countrycode"] = '1'
+ elif extend[1] == ':all':
+ board["slip"] = '3'
+ board["countrycode"] = '1'
+ except IndexError:
+ pass
+
+ # if we are replying, use first post's time
+ if post["parentid"]:
+ tim = parent_post["timestamp"]
+ else:
+ tim = post["timestamp"]
+
+ # make ID hash
+ if board["useid"] != '0':
+ post["timestamp_formatted"] += ' ID:' + iphash(ip, post, tim, board["useid"], mobile,
+ self.environ["HTTP_USER_AGENT"], cap_id, hide_end, (board["countrycode"] in ['1', '2']))
+
+ # use for future file checks
+ xfile = (file or oek_file)
+
+ # textboard enforcements (move these to settings, maybe?)
if board['board_type'] == '1':
- url = Settings.HOME_URL + board['dir'] + '/read/' + thread['timestamp'] + '/'
+ if not post["parentid"] and not post["subject"]:
+ raise UserError, _(
+ "You must enter a title to create a thread.")
+ if not post["message"]:
+ raise UserError, _("Please enter a message.")
else:
- url = Settings.HOME_URL + board['dir'] + '/res/' + thread['id'] + '.html'
- self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><meta http-equiv="refresh" content="0;url=%s" /><body><p>...</p></body></html>' % url
- elif path_split[1] == "nostalgia":
- caught = True
- OpenDb()
- thread = FetchOne("SELECT `timestamp` FROM `archive` WHERE `boardid` = 9 AND `timestamp` < 1462937230 ORDER BY RAND() LIMIT 1")
- url = Settings.HOME_URL + '/zonavip/read/' + thread['timestamp'] + '/'
- self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><meta http-equiv="refresh" content="0;url=%s" /><body><p>...</p></body></html>' % url
- elif path_split[1] == "banned":
- OpenDb()
- packed_ip = inet_aton(self.environ["REMOTE_ADDR"])
- bans = FetchAll("SELECT * FROM `bans` WHERE (`netmask` IS NULL AND `ip` = '"+str(packed_ip)+"') OR (`netmask` IS NOT NULL AND '"+str(packed_ip)+"' & `netmask` = `ip`)")
- if bans:
- for ban in bans:
- if ban["boards"] != "":
- boards = pickle.loads(ban["boards"])
- if ban["boards"] == "" or path_split[2] in boards:
- caught = True
- if ban["boards"]:
- boards_str = '/' + '/, /'.join(boards) + '/'
- else:
- boards_str = _("all boards")
- if ban["until"] != "0":
- expire = formatTimestamp(ban["until"])
- else:
- expire = ""
-
- template_values = {
- #'return_board': path_split[2],
- 'boards_str': boards_str,
- 'reason': ban['reason'],
- 'added': formatTimestamp(ban["added"]),
- 'expire': expire,
- 'ip': self.environ["REMOTE_ADDR"],
- }
- self.output = renderTemplate('banned.html', template_values)
+ if not post["parentid"] and not xfile and not noimage:
+ raise UserError, _(
+ "You must upload an image first to create a thread.")
+ if not xfile and not post["message"]:
+ raise UserError, _(
+ "Please enter a message or upload an image to reply.")
+
+ # check if this post is allowed
+ if post["parentid"]:
+ if file and board['allow_image_replies'] == '0':
+ raise UserError, _("Image replies not allowed.")
else:
- if len(path_split) > 2:
- caught = True
- self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><body><meta http-equiv="refresh" content="0;url=%s" /><p>%s</p></body></html>' % (Settings.HOME_URL + path_split[2], _("Your ban has expired. Redirecting..."))
- elif path_split[1] == "read":
- # Textboard read:
- if len(path_split) > 4:
- caught = True
- # 2: board
- # 3: thread
- # 4: post(s)
- OpenDb()
- board = setBoard(path_split[2])
- self.output = dynamicRead(int(path_split[3]), path_split[4])
- elif path_split[1] == "preview":
- caught = True
+ if file and board['allow_images'] == '0':
+ raise UserError, _("No images allowed.")
+
+ # use default values when missing / remove sage from wrong fields
+ if (not post["name"] and not post["tripcode"]) or (post["name"].lower() == 'sage'):
+ post["name"] = random.choice(board["anonymous"].split('|'))
+ if (not post["subject"] and not post["parentid"]) or (post["subject"].lower() == 'sage'):
+ post["subject"] = board["subject"]
+ if not post["message"]:
+ post["message"] = board["message"]
+
+ # process files
+ if oek_file:
+ try:
+ fname = os.path.join(Settings.IMAGES_DIR,
+ board['dir'], "temp", oek_file + ".png")
+ with open(fname, 'rb') as f:
+ file = f.read()
+ except:
+ raise UserError, "Imposible leer la imagen oekaki."
+
+ if file and not noimage:
+ post = processImage(post, file, t, file_original,
+ (spoil and board['allow_spoilers'] == '1'))
+
+ if oek_file:
+ # Remove temporary oekaki file if everything went right
+ # os.remove(fname)
+ # TODO: We will rename the file for now. We don't want lost work.
+ try:
+ os.rename(fname, fname + ".bak")
+ except:
+ pass # Just keep it if anything went wrong
+
+ # slip
+ if board["slip"] != '0':
+ slips = []
+
+ # name
+ if board["slip"] in ['1', '3']:
+ if time.strftime("%H") in ['00', '24'] and time.strftime("%M") == '00' and time.strftime("%S") == '00':
+ host_nick = '000000'
+ else:
+ host_nick = 'sarin'
+
+ if hide_end:
+ host_nick = '★'
+ elif addressIsTor(ip):
+ host_nick = 'onion'
+ else:
+ isps = {'cablevision': 'easy',
+ 'cantv': 'warrior',
+ 'claro': 'america',
+ 'cnet': 'nova',
+ 'copelnet': 'cisneros',
+ 'cps.com': 'silver',
+ 'cybercable': 'bricklayer',
+ 'entel': 'matte',
+ 'eternet': 'stream',
+ 'fibertel': 'roughage',
+ 'geonet': 'thunder',
+ 'gtdinternet': 'casanueva',
+ 'ifxnw': 'effect',
+ 'infinitum': 'telegraph',
+ 'intercable': 'easy',
+ 'intercity': 'cordoba',
+ 'iplannet': 'conquest',
+ 'itcsa.net': 'sarmiento',
+ 'megared': 'clear',
+ 'movistar': 'bell',
+ 'nextel': 'fleet',
+ 'speedy': 'oxygen',
+ 'telecom': 'license',
+ 'telmex': 'slender',
+ 'telnor': 'compass',
+ 'tie.cl': 'bell',
+ 'vtr.net': 'liberty',
+ 'utfsm': 'virgin',
+ }
+ host = getHost(ip)
+
+ if host:
+ for k, v in isps.iteritems():
+ if k in host:
+ host_nick = v
+ break
+
+ slips.append(host_nick)
+
+ # hash
+ if board["slip"] in ['1', '3']:
+ if hide_end:
+ slips.append('-'.join(('****', '****')))
+ elif addressIsTor(ip):
+ slips.append(
+ '-'.join(('****', getMD5(self.environ["HTTP_USER_AGENT"])[:4])))
+ else:
+ slips.append(
+ '-'.join((getMD5(ip)[:4], getMD5(self.environ["HTTP_USER_AGENT"])[:4])))
+
+ # host
+ if board["slip"] == '2':
+ if hide_end:
+ host = '★'
+ elif addressIsTor(ip):
+ host = 'onion'
+ else:
+ host = getHost(ip)
+ if host:
+ hosts = host.split('.')
+ if len(hosts) > 2:
+ if hosts[-2] in ['ne', 'net', 'com', 'co']:
+ host = '.'.join(
+ (hosts[-3], hosts[-2], hosts[-1]))
+ else:
+ host = '.'.join((hosts[-2], hosts[-1]))
+ host = '*.' + host
+ else:
+ iprs = ip.split('.')
+ host = '%s.%s.*.*' % (iprs[0], iprs[1])
+ slips.append(host)
+
+ # IP
+ if board["slip"] == '3':
+ if hide_end:
+ host = '[*.*.*.*]'
+ else:
+ iprs = ip.split('.')
+ host = '[%s.%s.*.*]' % (iprs[0], iprs[1])
+ slips.append(host)
+
+ if slips:
+ post["tripcode"] += " (%s)" % ' '.join(slips)
+
+ # country code
+ if board["countrycode"] == '1':
+ if hide_end or addressIsTor(ip):
+ country = '??'
+ else:
+ country = getCountry(ip)
+ post["name"] += " <em>[%s]</em>" % country
+
+ # set expiration date if necessary
+ if board["maxage"] != '0' and not post["parentid"]:
+ if board["dir"] == '2d':
+ date_format = '%m月%d日'
+ date_format_y = '%Y年%m月'
+ else:
+ date_format = '%d/%m'
+ date_format_y = '%m/%Y'
+ post["expires"] = int(t) + (int(board["maxage"]) * 86400)
+ if int(board["maxage"]) >= 365:
+ date_format = date_format_y
+ post["expires_formatted"] = datetime.datetime.fromtimestamp(
+ post["expires"]).strftime(date_format)
+
+ if not post["parentid"]:
+ # fill with default values if creating a new thread
+ post["length"] = 1
+ post["last"] = post["timestamp"]
+
+ if board["dir"] == 'noticias':
+ # check if there's at least one link
+ if "<a href" not in post["message"]:
+ raise UserError, "Al momento de crear un hilo en esta sección necesitas incluir al menos 1 link como fuente en tu mensaje."
+
+ # insert icon if needed
+ img_src = '<img src="%s" alt="ico" /><br />' % getRandomIco()
+ post["message"] = img_src + post["message"]
+
+ # insert post, then run trimThreads to make sure the board doesn't exceed the page limit
+ postid = post.insert()
+
+ # delete threads that have crossed last page
+ trimThreads()
+
+ # fix null references when creating thread
+ if board["board_type"] == '1' and not post["parentid"]:
+ post["message"] = re.compile(r'<a href="/(\w+)/res/0.html/(.+)"').sub(
+ r'<a href="/\1/res/'+str(postid)+r'.html/\2"', post["message"])
+ UpdateDb("UPDATE `posts` SET message = '%s' WHERE boardid = '%s' AND id = '%s'" % (_mysql.escape_string(
+ post["message"]), _mysql.escape_string(board["id"]), _mysql.escape_string(str(postid))))
+
+ # do operations if replying to a thread (bump, autoclose, update cache)
+ logTime("Updating thread")
+ thread_length = None
+ if post["parentid"]:
+ # get length of the thread
+ thread_length = threadNumReplies(post["parentid"])
+
+ # bump if not saged
+ if 'sage' not in post["email"].lower() and parent_post['locked'] != '2':
+ UpdateDb("UPDATE `posts` SET bumped = %d WHERE (`id` = '%s' OR `parentid` = '%s') AND `boardid` = '%s'" % (
+ post["timestamp"], post["parentid"], post["parentid"], board["id"]))
+
+ # check if thread must be closed
+ autoclose_thread(post["parentid"], t, thread_length)
+
+ # update final attributes (length and last post)
+ UpdateDb("UPDATE `posts` SET length = %d, last = %d WHERE `id` = '%s' AND `boardid` = '%s'" % (
+ thread_length, post["timestamp"], post["parentid"], board["id"]))
+
+ # update cache
+ threadUpdated(post["parentid"])
+ else:
+ # create cache for new thread
+ threadUpdated(postid)
+
+ regenerateHome()
+
+ # make page redirect
+ ttaken = timeTaken(_STARTTIME, time.clock())
+ noko = 'noko' in email.lower() or (board["board_type"] == '1')
+
+ # get new post url
+ post_url = make_url(postid, post, parent_post or post, noko, mobile)
+
+ if board['secret'] == '0':
+ # add to recent posts
+ if Settings.ENABLE_RSS:
+ latestAdd(post, thread_length, postid, parent_post)
+ # call discord hook
+ if Settings.ENABLE_DISCORD_HOOK and not post["parentid"]:
+ hook_url = make_url(
+ postid, post, parent_post or post, True, False)
+ discord_hook(post, hook_url)
+
+ return (post_url, ttaken, postid)
+
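+ # Small illustrative demo (not called anywhere): how the `!extend` header that
+ # make_post above recognises is split into its ID/slip groups and stripped from
+ # the message. The regex is the same one used above; the sample input is made up.
+ def _extend_parse_sketch(message):
+     import re
+     res = re.match(r"^!extend(:\w+)(:\w+)?\n", message)
+     if not res:
+         return None, message
+     # e.g. "!extend:force:all\nhola" -> ((':force', ':all'), 'hola')
+     return res.groups(), message[res.end(0):]
+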
+ def delete_post(self, boarddir, postid, imageonly, password, mobile=False):
OpenDb()
- try:
- board = setBoard(self.formdata["board"])
- message = format_post(self.formdata["message"], self.environ["REMOTE_ADDR"], self.formdata["parentid"])
- self.output = message
- except Exception, messagez:
- self.output = "Error: " + str(messagez) + " : " + str(self.formdata)
- if not caught:
- # Redirect the user back to the front page
- self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><body><meta http-equiv="refresh" content="0;url=%s" /><p>--&gt; --&gt; --&gt;</p></body></html>' % Settings.HOME_URL
-
- def make_post(self, ip, boarddir, parent, trap1, trap2, name, email, subject, message, file, file_original, spoil, oek_file, password, noimage, mobile):
- _STARTTIME = time.clock() # Comment if not debug
-
- if addressIsUS(ip):
- raise UserError, "Host en lista negra."
-
- # open database
- OpenDb()
-
- # set the board
- board = setBoard(boarddir)
-
- if board["dir"] != ["anarkia"]:
- if addressIsProxy(ip):
- raise UserError, "Proxy prohibido en esta sección."
-
- # check length of fields
- if len(name) > 50:
- raise UserError, "El campo de nombre es muy largo."
- if len(email) > 50:
- raise UserError, "El campo de e-mail es muy largo."
- if len(subject) > 100:
- raise UserError, "El campo de asunto es muy largo."
- if len(message) > 8000:
- raise UserError, "El campo de mensaje es muy largo."
- if message.count('\n') > 50:
- raise UserError, "El mensaje tiene muchos saltos de línea."
-
- # anti-spam trap
- if trap1 or trap2:
- raise UserError, "Te quedan tres días de vida."
-
- # Create a single datetime now so everything syncs up
- t = time.time()
-
- # Delete expired bans
- deletedBans = UpdateDb("DELETE FROM `bans` WHERE `until` != 0 AND `until` < " + str(timestamp()))
- if deletedBans > 0:
- regenerateAccess()
-
- # Redirect to ban page if user is banned
- if addressIsBanned(ip, board["dir"]):
- #raise UserError, 'Tu host está en la lista negra.'
- raise UserError, '<meta http-equiv="refresh" content="0; url=/cgi/banned/%s">' % board["dir"]
-
- # Disallow posting if the site OR board is in maintenance
- if Settings.MAINTENANCE:
- raise UserError, _("%s is currently under maintenance. We'll be back.") % Settings.SITE_TITLE
- if board["locked"] == '1':
- raise UserError, _("This board is closed. You can't post in it.")
-
- # create post object
- post = Post(board["id"])
- post["ip"] = inet_aton(ip)
- post["timestamp"] = post["bumped"] = int(t)
- post["timestamp_formatted"] = formatTimestamp(t)
-
- # load parent info if we are replying
- parent_post = None
- parent_timestamp = post["timestamp"]
- if parent:
- parent_post = get_parent_post(parent, board["id"])
- parent_timestamp = parent_post['timestamp']
- post["parentid"] = parent_post['id']
- post["bumped"] = parent_post['bumped']
- if parent_post['locked'] == '1':
- raise UserError, _("The thread is closed. You can't post in it.")
-
- # check if the user is flooding
- flood_check(t, post, board["id"])
-
- # use fields only if enabled
- if board["disable_name"] != '1':
- post["name"] = cleanString(name)
- post["email"] = cleanString(email, quote=True)
- if board["disable_subject"] != '1':
- post["subject"] = cleanString(subject)
-
- # process tripcodes
- post["name"], post["tripcode"] = tripcode(post["name"])
-
- # Remove carriage return, they're useless
- message = message.replace("\r", "")
-
- # check ! functions before
- extend = extend_str = dice = ball = None
-
- if not post["parentid"] and board["dir"] not in ['bai', 'world']:
- # creating thread
- __extend = re.compile(r"^!extend(:\w+)(:\w+)?\n")
- res = __extend.match(message)
- if res:
- extend = res.groups()
- # truncate extend
- extend_str = res.group(0)
- message = message[res.end(0):]
-
- if board["dir"] in ['juegos', '0', 'polka']:
- __dice = re.compile(r"^!dado(:\w+)(:\w+)?\n")
- res = __dice.match(message)
- if res:
- dice = res.groups()
- message = message[res.end(0):]
-
- if board["dir"] in ['zonavip', '0', 'polka']:
- __ball = re.compile(r"^!bola8\n")
- res = __ball.match(message)
- if res:
- ball = True
- message = message[res.end(0):]
-
- # use and format message
- if message.strip():
- post["message"] = format_post(message, ip, post["parentid"], parent_timestamp)
-
- # add function messages
- if extend_str:
- extend_str = extend_str.replace('!extend', 'EXTEND')
- post["message"] += '<hr />' + extend_str + ' configurado.'
- if dice:
- post["message"] += '<hr />' + throw_dice(dice)
- if ball:
- post["message"] += '<hr />' + magic_ball()
-
- if not post["parentid"] and post["email"].lower() == 'sage':
- post["email"] = ""
-
- # disallow illegal characters
- if post["name"]:
- post["name"] = post["name"].replace('★', '☆')
- post["name"] = post["name"].replace('◆', '◇')
-
- # process capcodes
- cap_id = hide_end = None
- if post["name"] in Settings.CAPCODES:
- capcode = Settings.CAPCODES[post["name"]]
- if post["tripcode"] == (Settings.TRIP_CHAR + capcode[0]):
- post["name"], post["tripcode"] = capcode[1], capcode[2]
- #if board['board_type'] == '1':
- # post["name"], post["tripcode"] = capcode[1], ''
- #else:
- # post["name"] = post["tripcode"] = ''
- # post["message"] = ('[<span style="color:red">%s</span>]<br />' % capcode[2]) + post["message"]
-
- cap_id, hide_end = capcode[3], capcode[4]
-
- # hide ip if necessary
- if hide_end:
- post["ip"] = 0
-
- # use password
- post["password"] = password
-
- # EXTEND feature
- if post["parentid"] and board["dir"] not in ['bai', 'world']:
- # replying
- __extend = re.compile(r"<hr />EXTEND(:\w+)(:\w+)?\b")
- res = __extend.search(parent_post["message"])
- if res:
- extend = res.groups()
-
- # compatibility : old id function
- if 'id' in parent_post["email"]:
- board["useid"] = '3'
-
- if 'id' in post["email"]:
- board["useid"] = '3'
-
- if extend:
- try:
- # 1: ID
- if extend[0] == ':no':
- board["useid"] = '0'
- elif extend[0] == ':yes':
- board["useid"] = '1'
- elif extend[0] == ':force':
- board["useid"] = '2'
- elif extend[0] == ':extra':
- board["useid"] = '3'
-
- # 2: Slip
- if extend[1] == ':no':
- board["slip"] = '0'
- elif extend[1] == ':yes':
- board["slip"] = '1'
- elif extend[1] == ':domain':
- board["slip"] = '2'
- elif extend[1] == ':verbose':
- board["slip"] = '3'
- elif extend[1] == ':country':
- board["countrycode"] = '1'
- elif extend[1] == ':all':
- board["slip"] = '3'
- board["countrycode"] = '1'
- except IndexError:
- pass
-
- # if we are replying, use first post's time
- if post["parentid"]:
- tim = parent_post["timestamp"]
- else:
- tim = post["timestamp"]
-
- # make ID hash
- if board["useid"] != '0':
- post["timestamp_formatted"] += ' ID:' + iphash(ip, post, tim, board["useid"], mobile, self.environ["HTTP_USER_AGENT"], cap_id, hide_end, (board["countrycode"] in ['1', '2']))
-
- # use for future file checks
- xfile = (file or oek_file)
-
- # textboard inforcements (change it to settings maybe?)
- if board['board_type'] == '1':
- if not post["parentid"] and not post["subject"]:
- raise UserError, _("You must enter a title to create a thread.")
- if not post["message"]:
- raise UserError, _("Please enter a message.")
- else:
- if not post["parentid"] and not xfile and not noimage:
- raise UserError, _("You must upload an image first to create a thread.")
- if not xfile and not post["message"]:
- raise UserError, _("Please enter a message or upload an image to reply.")
-
- # check if this post is allowed
- if post["parentid"]:
- if file and board['allow_image_replies'] == '0':
- raise UserError, _("Image replies not allowed.")
- else:
- if file and board['allow_images'] == '0':
- raise UserError, _("No images allowed.")
-
- # use default values when missing / remove sage from wrong fields
- if (not post["name"] and not post["tripcode"]) or (post["name"].lower() == 'sage'):
- post["name"] = random.choice(board["anonymous"].split('|'))
- if (not post["subject"] and not post["parentid"]) or (post["subject"].lower() == 'sage'):
- post["subject"] = board["subject"]
- if not post["message"]:
- post["message"] = board["message"]
-
- # process files
- if oek_file:
- try:
- fname = os.path.join(Settings.IMAGES_DIR, board['dir'], "temp", oek_file + ".png")
- with open(fname, 'rb') as f:
- file = f.read()
- except:
- raise UserError, "Imposible leer la imagen oekaki."
-
- if file and not noimage:
- post = processImage(post, file, t, file_original, (spoil and board['allow_spoilers'] == '1'))
-
- if oek_file:
- # Remove temporary oekaki file if everything went right
- #os.remove(fname)
- # TODO: We will rename the file for now. We don't want lost work.
- try:
- os.rename(fname, fname + ".bak")
- except:
- pass # Just keep it if anything went wrong
-
- # slip
- if board["slip"] != '0':
- slips = []
-
- # name
- if board["slip"] in ['1', '3']:
- if time.strftime("%H") in ['00', '24'] and time.strftime("%M") == '00' and time.strftime("%S") == '00':
- host_nick = '000000'
+
+ # set the board
+ board = setBoard(boarddir)
+
+ if board["dir"] == '0':
+ raise UserError, "No se pueden eliminar mensajes en esta sección."
+
+ # make sure at least one post id was selected
+ if not postid:
+ raise UserError, "Selecciona uno o más mensajes a eliminar."
+
+ # make sure we have a password
+ if not password:
+ raise UserError, _("Please enter a password.")
+
+ to_delete = []
+ if isinstance(postid, list):
+ to_delete = [n.value for n in postid]
else:
- host_nick = 'sarin'
-
- if hide_end:
- host_nick = '★'
- elif addressIsTor(ip):
- host_nick = 'onion'
- else:
- isps = {'cablevision': 'easy',
- 'cantv': 'warrior',
- 'claro': 'america',
- 'cnet': 'nova',
- 'copelnet': 'cisneros',
- 'cps.com': 'silver',
- 'cybercable': 'bricklayer',
- 'entel': 'matte',
- 'eternet': 'stream',
- 'fibertel': 'roughage',
- 'geonet': 'thunder',
- 'gtdinternet': 'casanueva',
- 'ifxnw': 'effect',
- 'infinitum': 'telegraph',
- 'intercable': 'easy',
- 'intercity': 'cordoba',
- 'iplannet': 'conquest',
- 'itcsa.net': 'sarmiento',
- 'megared': 'clear',
- 'movistar': 'bell',
- 'nextel': 'fleet',
- 'speedy': 'oxygen',
- 'telecom': 'license',
- 'telmex': 'slender',
- 'telnor': 'compass',
- 'tie.cl': 'bell',
- 'vtr.net': 'liberty',
- 'utfsm': 'virgin',
- }
- host = getHost(ip)
-
- if host:
- for k, v in isps.iteritems():
- if k in host:
- host_nick = v
- break
-
- slips.append(host_nick)
-
- # hash
- if board["slip"] in ['1', '3']:
- if hide_end:
- slips.append('-'.join(('****', '****')))
- elif addressIsTor(ip):
- slips.append('-'.join(('****', getMD5(self.environ["HTTP_USER_AGENT"])[:4])))
+ to_delete = [postid]
+
+ # delete posts
+ if board['board_type'] == '1' and len(to_delete) == 1:
+ # we should be deleting only one (textboard)
+ # check if it's the last post and delete permanently if so
+ deltype = '0'
+ post = FetchOne("SELECT `id`, `timestamp`, `parentid` FROM `posts` WHERE `boardid` = %s AND `id` = %s LIMIT 1" % (
+ board["id"], str(to_delete[0])))
+ if post['parentid'] != '0':
+ op = get_parent_post(post['parentid'], board['id'])
+ if op['last'] != post['timestamp']:
+ deltype = '1'
+
+ deletePost(to_delete[0], password, deltype, imageonly)
+ latestRemove(post['id'])
+ regenerateHome()
else:
- slips.append('-'.join((getMD5(ip)[:4], getMD5(self.environ["HTTP_USER_AGENT"])[:4])))
-
- # host
- if board["slip"] == '2':
- if hide_end:
- host = '★'
- elif addressIsTor(ip):
- host = 'onion'
+ # delete all checked posts (IB)
+ deleted = 0
+ errors = 0
+ msgs = []
+
+ for pid in to_delete:
+ try:
+ deletePost(pid, password, board['recyclebin'], imageonly)
+ latestRemove(pid)
+ deleted += 1
+ msgs.append('No.%s: Eliminado' % pid)
+ except UserError, message:
+ errors += 1
+ msgs.append('No.%s: %s' % (pid, message))
+
+ # regenerate home
+ if deleted:
+ regenerateHome()
+
+ # show errors, if any
+ if errors:
+ raise UserError, 'No todos los mensajes pudieron ser eliminados.<br />' + \
+ '<br />'.join(msgs)
+
+ # redirect
+ if imageonly:
+ self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><body><meta http-equiv="refresh" content="0;url=%s/" /><p>%s</p></body></html>' % (
+ ("/cgi/mobile/" if mobile else Settings.BOARDS_URL) + board["dir"], _("File deleted successfully."))
else:
- host = getHost(ip)
- if host:
- hosts = host.split('.')
- if len(hosts) > 2:
- if hosts[-2] in ['ne', 'net', 'com', 'co']:
- host = '.'.join((hosts[-3], hosts[-2], hosts[-1]))
- else:
- host = '.'.join((hosts[-2], hosts[-1]))
- host = '*.' + host
- else:
- iprs = ip.split('.')
- host = '%s.%s.*.*' % (iprs[0], iprs[1])
- slips.append(host)
-
- # IP
- if board["slip"] == '3':
- if hide_end:
- host = '[*.*.*.*]'
+ self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><body><meta http-equiv="refresh" content="0;url=%s/" /><p>%s</p></body></html>' % (
+ ("/cgi/mobile/" if mobile else Settings.BOARDS_URL) + board["dir"], _("Post deleted successfully."))
+
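+ # Illustrative sketch of the slip hash pair built in make_post above: the first
+ # four hex digits of an MD5 of the IP joined with the same for the User-Agent.
+ # The helper name is hypothetical and getMD5 is assumed to be a plain hexdigest
+ # wrapper; only the output format is meant to match.
+ def _slip_hash_sketch(ip, user_agent):
+     import hashlib
+     ip_part = hashlib.md5(ip.encode("utf-8")).hexdigest()[:4]
+     ua_part = hashlib.md5(user_agent.encode("utf-8")).hexdigest()[:4]
+     return "-".join((ip_part, ua_part))  # four hex digits, dash, four hex digits
+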
+ def report(self, ip, boarddir, postid, reason, txt, postshow):
+ # don't allow if the report system is off
+ if not Settings.REPORTS_ENABLE:
+ raise UserError, _('Report system is deactivated.')
+
+ # if no reason was given, show the report form
+ if reason is None:
+ self.output += renderTemplate("report.html",
+ {'finished': False, 'postshow': postshow, 'txt': txt})
+ return
+
+ # check reason
+ if not reason:
+ raise UserError, _("Enter a reason.")
+ if len(reason) > 100:
+ raise UserError, _("Text too long.")
+
+ # open database
+ OpenDb()
+
+ # set the board we're in
+ board = setBoard(boarddir)
+
+ # check if the reporter is banned
+ if addressIsBanned(ip, board["dir"]):
+ raise UserError, _("You're banned.")
+
+ # check if post exists
+ post = FetchOne("SELECT `id`, `parentid`, `ip` FROM `posts` WHERE `id` = '%s' AND `boardid` = '%s'" % (
+ _mysql.escape_string(str(postid)), _mysql.escape_string(board['id'])))
+ if not post:
+ raise UserError, _("Post doesn't exist.")
+
+ # generate link
+ if board["board_type"] == '1':
+ parent_post = get_parent_post(post["parentid"], board["id"])
+ link = "/%s/read/%s/%s" % (board["dir"],
+ parent_post["timestamp"], postshow)
else:
- iprs = ip.split('.')
- host = '[%s.%s.*.*]' % (iprs[0], iprs[1])
- slips.append(host)
-
- if slips:
- post["tripcode"] += " (%s)" % ' '.join(slips)
-
- # country code
- if board["countrycode"] == '1':
- if hide_end or addressIsTor(ip):
- country = '??'
- else:
- country = getCountry(ip)
- post["name"] += " <em>[%s]</em>" % country
-
- # set expiration date if necessary
- if board["maxage"] != '0' and not post["parentid"]:
- if board["dir"] == '2d':
- date_format = '%m月%d日'
- date_format_y = '%Y年%m月'
- else:
- date_format = '%d/%m'
- date_format_y = '%m/%Y'
- post["expires"] = int(t) + (int(board["maxage"]) * 86400)
- if int(board["maxage"]) >= 365:
- date_format = date_format_y
- post["expires_formatted"] = datetime.datetime.fromtimestamp(post["expires"]).strftime(date_format)
-
- if not post["parentid"]:
- # fill with default values if creating a new thread
- post["length"] = 1
- post["last"] = post["timestamp"]
-
- if board["dir"] == 'noticias':
- # check if there's at least one link
- if "<a href" not in post["message"]:
- raise UserError, "Al momento de crear un hilo en esta sección necesitas incluír al menos 1 link como fuente en tu mensaje."
-
- # insert icon if needed
- img_src = '<img src="%s" alt="ico" /><br />' % getRandomIco()
- post["message"] = img_src + post["message"]
-
- # insert post, then run timThreads to make sure the board doesn't exceed the page limit
- postid = post.insert()
-
- # delete threads that have crossed last page
- trimThreads()
-
- # fix null references when creating thread
- if board["board_type"] == '1' and not post["parentid"]:
- post["message"] = re.compile(r'<a href="/(\w+)/res/0.html/(.+)"').sub(r'<a href="/\1/res/'+str(postid)+r'.html/\2"', post["message"])
- UpdateDb("UPDATE `posts` SET message = '%s' WHERE boardid = '%s' AND id = '%s'" % (_mysql.escape_string(post["message"]), _mysql.escape_string(board["id"]), _mysql.escape_string(str(postid))))
-
- # do operations if replying to a thread (bump, autoclose, update cache)
- logTime("Updating thread")
- thread_length = None
- if post["parentid"]:
- # get length of the thread
- thread_length = threadNumReplies(post["parentid"])
-
- # bump if not saged
- if 'sage' not in post["email"].lower() and parent_post['locked'] != '2':
- UpdateDb("UPDATE `posts` SET bumped = %d WHERE (`id` = '%s' OR `parentid` = '%s') AND `boardid` = '%s'" % (post["timestamp"], post["parentid"], post["parentid"], board["id"]))
-
- # check if thread must be closed
- autoclose_thread(post["parentid"], t, thread_length)
-
- # update final attributes (length and last post)
- UpdateDb("UPDATE `posts` SET length = %d, last = %d WHERE `id` = '%s' AND `boardid` = '%s'" % (thread_length, post["timestamp"], post["parentid"], board["id"]))
-
- # update cache
- threadUpdated(post["parentid"])
- else:
- # create cache for new thread
- threadUpdated(postid)
-
- regenerateHome()
-
- # make page redirect
- ttaken = timeTaken(_STARTTIME, time.clock())
- noko = 'noko' in email.lower() or (board["board_type"] == '1')
-
- # get new post url
- post_url = make_url(postid, post, parent_post or post, noko, mobile)
-
- if board['secret'] == '0':
- # add to recent posts
- if Settings.ENABLE_RSS:
- latestAdd(post, thread_length, postid, parent_post)
- # call discord hook
- if Settings.ENABLE_DISCORD_HOOK and not post["parentid"]:
- hook_url = make_url(postid, post, parent_post or post, True, False)
- discord_hook(post, hook_url)
-
- return (post_url, ttaken, postid)
-
- def delete_post(self, boarddir, postid, imageonly, password, mobile=False):
- OpenDb()
-
- # set the board
- board = setBoard(boarddir)
-
- if board["dir"] == '0':
- raise UserError, "No se pueden eliminar mensajes en esta sección."
-
- # check if we have a post id and check it's numeric
- if not postid:
- raise UserError, "Selecciona uno o más mensajes a eliminar."
-
- # make sure we have a password
- if not password:
- raise UserError, _("Please enter a password.")
-
- to_delete = []
- if isinstance(postid, list):
- to_delete = [n.value for n in postid]
- else:
- to_delete = [postid]
-
- # delete posts
- if board['board_type'] == '1' and len(to_delete) == 1:
- # we should be deleting only one (textboard)
- # check if it's the last post and delete permanently if so
- deltype = '0'
- post = FetchOne("SELECT `id`, `timestamp`, `parentid` FROM `posts` WHERE `boardid` = %s AND `id` = %s LIMIT 1" % (board["id"], str(to_delete[0])))
- if post['parentid'] != '0':
- op = get_parent_post(post['parentid'], board['id'])
- if op['last'] != post['timestamp']:
- deltype = '1'
-
- deletePost(to_delete[0], password, deltype, imageonly)
- latestRemove(post['id'])
- regenerateHome()
- else:
- # delete all checked posts (IB)
- deleted = 0
- errors = 0
- msgs = []
-
- for pid in to_delete:
+ link = "/%s/res/%d.html#%d" % (board["dir"], int(
+ post["parentid"]) or int(post["id"]), int(post["id"]))
+
+ # insert report
+ t = time.time()
+ message = cgi.escape(self.formdata["reason"]).strip()[0:8000]
+ message = message.replace("\n", "<br />")
+
+ UpdateDb("INSERT INTO `reports` (board, postid, parentid, link, ip, reason, reporterip, timestamp, timestamp_formatted) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % (
+ board["dir"], post['id'], post['parentid'], link, post['ip'], _mysql.escape_string(message), _mysql.escape_string(self.environ["REMOTE_ADDR"]), str(t), formatTimestamp(t)))
+ self.output = renderTemplate("report.html", {'finished': True})
+
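+ # For reference, a minimal sketch of the two link shapes report() builds above:
+ # the textboard /read/ form keyed by the parent thread's timestamp, and the
+ # imageboard /res/ form keyed by post ids. The sample values are made up.
+ def _report_link_sketch():
+     textboard = "/%s/read/%s/%s" % ("zonavip", "1462937230", "12")
+     imageboard = "/%s/res/%d.html#%d" % ("noticias", 345, 678)
+     # ("/zonavip/read/1462937230/12", "/noticias/res/345.html#678")
+     return textboard, imageboard
+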
+ def stats(self):
+ import json
+ import math
+ import platform
try:
- deletePost(pid, password, board['recyclebin'], imageonly)
- latestRemove(pid)
- deleted += 1
- msgs.append('No.%s: Eliminado' % pid)
- except UserError, message:
- errors += 1
- msgs.append('No.%s: %s' % (pid, message))
-
- # regenerate home
- if deleted:
- regenerateHome()
-
- # show errors, if any
- if errors:
- raise UserError, 'No todos los mensajes pudieron ser eliminados.<br />' + '<br />'.join(msgs)
-
- # redirect
- if imageonly:
- self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><body><meta http-equiv="refresh" content="0;url=%s/" /><p>%s</p></body></html>' % (("/cgi/mobile/" if mobile else Settings.BOARDS_URL) + board["dir"], _("File deleted successfully."))
- else:
- self.output += '<html xmlns="http://www.w3.org/1999/xhtml"><body><meta http-equiv="refresh" content="0;url=%s/" /><p>%s</p></body></html>' % (("/cgi/mobile/" if mobile else Settings.BOARDS_URL) + board["dir"], _("Post deleted successfully."))
-
- def report(self, ip, boarddir, postid, reason, txt, postshow):
- # don't allow if the report system is off
- if not Settings.REPORTS_ENABLE:
- raise UserError, _('Report system is deactivated.')
-
- # if there's not a reason, show the report page
- if reason is None:
- self.output += renderTemplate("report.html", {'finished': False, 'postshow': postshow, 'txt': txt})
- return
-
- # check reason
- if not reason:
- raise UserError, _("Enter a reason.")
- if len(reason) > 100:
- raise UserError, _("Text too long.")
-
- # open database
- OpenDb()
-
- # set the board we're in
- board = setBoard(boarddir)
-
- # check if he's banned
- if addressIsBanned(ip, board["dir"]):
- raise UserError, _("You're banned.")
-
- # check if post exists
- post = FetchOne("SELECT `id`, `parentid`, `ip` FROM `posts` WHERE `id` = '%s' AND `boardid` = '%s'" % (_mysql.escape_string(str(postid)), _mysql.escape_string(board['id'])))
- if not post:
- raise UserError, _("Post doesn't exist.")
-
- # generate link
- if board["board_type"] == '1':
- parent_post = get_parent_post(post["parentid"], board["id"])
- link = "/%s/read/%s/%s" % (board["dir"], parent_post["timestamp"], postshow)
- else:
- link = "/%s/res/%d.html#%d" % (board["dir"], int(post["parentid"]) or int(post["id"]), int(post["id"]))
-
- # insert report
- t = time.time()
- message = cgi.escape(self.formdata["reason"]).strip()[0:8000]
- message = message.replace("\n", "<br />")
-
- UpdateDb("INSERT INTO `reports` (board, postid, parentid, link, ip, reason, reporterip, timestamp, timestamp_formatted) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % (board["dir"], post['id'], post['parentid'], link, post['ip'], _mysql.escape_string(message), _mysql.escape_string(self.environ["REMOTE_ADDR"]), str(t), formatTimestamp(t)))
- self.output = renderTemplate("report.html", {'finished': True})
-
- def stats(self):
- import json, math, platform
- try:
- with open('stats.json', 'r') as f:
- out = json.load(f)
- except ValueError:
- out = {'t': 0}
-
- regenerated = False
- if (time.time() - out['t']) > 3600:
- regenerated = True
-
- # open database
- OpenDb()
-
- # 1 week = 604800
- query_day = FetchAll("SELECT DATE_FORMAT(FROM_UNIXTIME(FLOOR((timestamp-10800)/86400)*86400+86400), \"%Y-%m-%d\"), COUNT(1), COUNT(IF(parentid=0, 1, NULL)) "
- "FROM posts "
- "WHERE (timestamp-10800) > (UNIX_TIMESTAMP()-604800) AND (IS_DELETED = 0 OR IS_DELETED = 3) "
- "GROUP BY FLOOR((timestamp-10800)/86400) "
- "ORDER BY FLOOR((timestamp-10800)/86400)", 0)
-
- query_count = FetchOne("SELECT COUNT(1), COUNT(NULLIF(file, '')), VERSION() FROM posts", 0)
- total = int(query_count[0])
- total_files = int(query_count[1])
- mysql_ver = query_count[2]
-
- archive_count = FetchOne("SELECT SUM(length) FROM archive", 0)
- total_archived = int(archive_count[0])
-
- days = []
- for date, count, threads in query_day[1:]:
- days.append( (date, count, threads) )
-
- query_b = FetchAll("SELECT id, dir, name FROM boards WHERE boards.secret = 0", 0)
-
- boards = []
- totalp = 0
- for id, dir, longname in query_b:
- bposts = FetchOne("SELECT COUNT(1) FROM posts "
- "WHERE '"+str(id)+"' = posts.boardid AND timestamp > ( UNIX_TIMESTAMP(DATE(NOW())) - 2419200 )", 0)
- boards.append( (dir, longname, int(bposts[0])) )
- totalp += int(bposts[0])
-
- boards = sorted(boards, key=lambda boards: boards[2], reverse=True)
-
- boards_percent = []
- for dir, longname, bposts in boards:
- if bposts > 0:
- boards_percent.append( (dir, longname, '{0:.2f}'.format( float(bposts)*100/totalp ), int(bposts) ) )
- else:
- boards_percent.append( (dir, longname, '0.00', '0' ) )
-
- #posts = FetchAll("SELECT `parentid`, `boardid` FROM `posts` INNER JOIN `boards` ON posts.boardid = boards.id WHERE posts.parentid<>0 AND posts.timestamp>(UNIX_TIMESTAMP()-86400) AND boards.secret=0 ORDER BY `parentid`")
- #threads = {}
- #for post in posts:
- # if post["parentid"] in threads:
- # threads[post["parentid"]] += 1
- # else:
- # threads[post["parentid"]] = 1
-
- python_version = platform.python_version()
- if self.environ.get('FCGI_FORCE_CGI', 'N').upper().startswith('Y'):
- python_version += " (CGI)"
- else:
- python_version += " (FastCGI)"
-
- out = {
- "uname": platform.uname(),
- "python_ver": python_version,
- "python_impl": platform.python_implementation(),
- "python_build": platform.python_build()[1],
- "python_compiler": platform.python_compiler(),
- "mysql_ver": mysql_ver,
- "tenjin_ver": tenjin.__version__,
- "weabot_ver": __version__,
- "days": days,
- "boards": boards,
- "boards_percent": boards_percent,
- "total": total,
- "total_files": total_files,
- "total_archived": total_archived,
- "t": timestamp(),
- "tz": Settings.TIME_ZONE,
- }
- with open('stats.json', 'w') as f:
- json.dump(out, f)
-
- out['timestamp'] = re.sub(r"\(...\)", " ", formatTimestamp(out['t']))
- out['regenerated'] = regenerated
- self.output = renderTemplate("stats.html", out)
- #self.headers = [("Content-Type", "application/json")]
+ with open('stats.json', 'r') as f:
+ out = json.load(f)
+ except (IOError, ValueError):  # stats.json missing or invalid
+ out = {'t': 0}
+
+ regenerated = False
+ if (time.time() - out['t']) > 3600:
+ regenerated = True
+
+ # open database
+ OpenDb()
+
+ # 1 week = 604800 seconds
+ query_day = FetchAll("SELECT DATE_FORMAT(FROM_UNIXTIME(FLOOR((timestamp-10800)/86400)*86400+86400), \"%Y-%m-%d\"), COUNT(1), COUNT(IF(parentid=0, 1, NULL)) "
+ "FROM posts "
+ "WHERE (timestamp-10800) > (UNIX_TIMESTAMP()-604800) AND (IS_DELETED = 0 OR IS_DELETED = 3) "
+ "GROUP BY FLOOR((timestamp-10800)/86400) "
+ "ORDER BY FLOOR((timestamp-10800)/86400)", 0)
+
+ query_count = FetchOne(
+ "SELECT COUNT(1), COUNT(NULLIF(file, '')), VERSION() FROM posts", 0)
+ total = int(query_count[0])
+ total_files = int(query_count[1])
+ mysql_ver = query_count[2]
+
+ archive_count = FetchOne("SELECT SUM(length) FROM archive", 0)
+ total_archived = int(archive_count[0])
+
+ days = []
+ for date, count, threads in query_day[1:]:
+ days.append((date, count, threads))
+
+ query_b = FetchAll(
+ "SELECT id, dir, name FROM boards WHERE boards.secret = 0", 0)
+
+ boards = []
+ totalp = 0
+ for id, dir, longname in query_b:
+ bposts = FetchOne("SELECT COUNT(1) FROM posts "
+ "WHERE '"+str(id)+"' = posts.boardid AND timestamp > ( UNIX_TIMESTAMP(DATE(NOW())) - 2419200 )", 0)
+ boards.append((dir, longname, int(bposts[0])))
+ totalp += int(bposts[0])
+
+ boards = sorted(boards, key=lambda boards: boards[2], reverse=True)
+
+ boards_percent = []
+ for dir, longname, bposts in boards:
+ if bposts > 0:
+ boards_percent.append((dir, longname, '{0:.2f}'.format(
+ float(bposts)*100/totalp), int(bposts)))
+ else:
+ boards_percent.append((dir, longname, '0.00', '0'))
+
+ #posts = FetchAll("SELECT `parentid`, `boardid` FROM `posts` INNER JOIN `boards` ON posts.boardid = boards.id WHERE posts.parentid<>0 AND posts.timestamp>(UNIX_TIMESTAMP()-86400) AND boards.secret=0 ORDER BY `parentid`")
+ #threads = {}
+ # for post in posts:
+ # if post["parentid"] in threads:
+ # threads[post["parentid"]] += 1
+ # else:
+ # threads[post["parentid"]] = 1
+
+ python_version = platform.python_version()
+ if self.environ.get('FCGI_FORCE_CGI', 'N').upper().startswith('Y'):
+ python_version += " (CGI)"
+ else:
+ python_version += " (FastCGI)"
+
+ out = {
+ "uname": platform.uname(),
+ "python_ver": python_version,
+ "python_impl": platform.python_implementation(),
+ "python_build": platform.python_build()[1],
+ "python_compiler": platform.python_compiler(),
+ "mysql_ver": mysql_ver,
+ "tenjin_ver": tenjin.__version__,
+ "weabot_ver": __version__,
+ "days": days,
+ "boards": boards,
+ "boards_percent": boards_percent,
+ "total": total,
+ "total_files": total_files,
+ "total_archived": total_archived,
+ "t": timestamp(),
+ "tz": Settings.TIME_ZONE,
+ }
+ with open('stats.json', 'w') as f:
+ json.dump(out, f)
+
+ out['timestamp'] = re.sub(r"\(...\)", " ", formatTimestamp(out['t']))
+ out['regenerated'] = regenerated
+ self.output = renderTemplate("stats.html", out)
+ #self.headers = [("Content-Type", "application/json")]
+
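+ # Minimal sketch of the hourly stats.json refresh check used by stats() above
+ # (the 3600-second window). The name and the `build` callback are placeholders
+ # for the SQL queries in stats(); this helper is not wired in anywhere.
+ def _stats_cache_sketch(build, path="stats.json", max_age=3600):
+     import json
+     import time
+     try:
+         with open(path, "r") as f:
+             out = json.load(f)
+     except (IOError, ValueError):
+         out = {"t": 0}
+     if (time.time() - out["t"]) > max_age:
+         out = build()  # must return a dict that includes a fresh "t" timestamp
+         with open(path, "w") as f:
+             json.dump(out, f)
+     return out
+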
if __name__ == "__main__":
- from fcgi import WSGIServer
-
- # Psyco is not required, however it will be used if available
- try:
- import psyco
- logTime("Psyco se ha instalado")
- psyco.bind(tenjin.helpers.to_str)
- psyco.bind(weabot.run, 2)
- psyco.bind(getFormData)
- psyco.bind(setCookie)
- psyco.bind(threadUpdated)
- psyco.bind(processImage)
- except:
- pass
-
- WSGIServer(weabot).run()
+ from fcgi import WSGIServer
+
+ # Psyco is not required, but it will be used if available
+ try:
+ import psyco
+ logTime("Psyco se ha instalado")
+ psyco.bind(tenjin.helpers.to_str)
+ psyco.bind(weabot.run, 2)
+ psyco.bind(getFormData)
+ psyco.bind(setCookie)
+ psyco.bind(threadUpdated)
+ psyco.bind(processImage)
+ except:
+ pass
+ WSGIServer(weabot).run()