Search for custom columns implemented (#494)

Search for ratings (related to #559)
Search by description (#572)
Code cosmetics
OzzieIsaacs 2018-08-12 18:21:57 +02:00
parent 791f4a8078
commit 5ef70890d9
7 changed files with 272 additions and 183 deletions

View File

@ -160,7 +160,8 @@ var RarVolumeHeader = function(bstream) {
// read in filename
this.filename = bstream.readBytes(this.nameSize);
for (var _i = 0, _s = ""; _i < this.filename.length ; _i++) {
var _s = ""
for (var _i = 0; _i < this.filename.length ; _i++) {
_s += String.fromCharCode(this.filename[_i]);
}
@ -209,7 +210,7 @@ var RarVolumeHeader = function(bstream) {
}
};
var BLOCK_LZ = 0;
//var BLOCK_LZ = 0;
var rLDecode = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224],
rLBits = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5],
@ -227,17 +228,17 @@ var rDBits = [0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5,
5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14,
15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16];
var rLOW_DIST_REP_COUNT = 16;
var rLowDistRepCount = 16;
var rNC = 299,
rDC = 60,
rLDC = 17,
rRC = 28,
rBC = 20,
rHUFF_TABLE_SIZE = (rNC + rDC + rRC + rLDC);
rHuffTableSize = (rNC + rDC + rRC + rLDC);
//var UnpBlockType = BLOCK_LZ;
var UnpOldTable = new Array(rHUFF_TABLE_SIZE);
var UnpOldTable = new Array(rHuffTableSize);
var BD = { //bitdecode
DecodeLen: new Array(16),
@ -270,8 +271,8 @@ var rBuffer;
// read in Huffman tables for RAR
function RarReadTables(bstream) {
var BitLength = new Array(rBC),
Table = new Array(rHUFF_TABLE_SIZE);
Table = new Array(rHuffTableSize);
var i;
// before we start anything we need to get byte-aligned
bstream.readBits( (8 - bstream.bitPtr) & 0x7 );
@ -281,7 +282,6 @@ function RarReadTables(bstream) {
}
if (!bstream.readBits(1)) { //discard old table
var i;
for (i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
}
@ -307,24 +307,25 @@ function RarReadTables(bstream) {
// now all 20 bit lengths are obtained, we construct the Huffman Table:
RarMakeDecodeTables(BitLength, 0, BD, rBC);
rarMakeDecodeTables(BitLength, 0, BD, rBC);
var TableSize = rHUFF_TABLE_SIZE;
var TableSize = rHuffTableSize;
//console.log(DecodeLen, DecodePos, DecodeNum);
for (i = 0; i < TableSize;) {
var N;
var num = RarDecodeNumber(bstream, BD);
if (num < 16) {
Table[i] = (num + UnpOldTable[i]) & 0xf;
i++;
} else if (num < 18) {
var N = (num === 16) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
N = (num === 16) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
while (N-- > 0 && i < TableSize) {
Table[i] = Table[i - 1];
i++;
}
} else {
var N = (num === 18) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
N = (num === 18) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
while (N-- > 0 && i < TableSize) {
Table[i++] = 0;
@ -332,10 +333,10 @@ function RarReadTables(bstream) {
}
}
RarMakeDecodeTables(Table, 0, LD, rNC);
RarMakeDecodeTables(Table, rNC, DD, rDC);
RarMakeDecodeTables(Table, rNC + rDC, LDD, rLDC);
RarMakeDecodeTables(Table, rNC + rDC + rLDC, RD, rRC);
rarMakeDecodeTables(Table, 0, LD, rNC);
rarMakeDecodeTables(Table, rNC, DD, rDC);
rarMakeDecodeTables(Table, rNC + rDC, LDD, rLDC);
rarMakeDecodeTables(Table, rNC + rDC + rLDC, RD, rRC);
for (i = UnpOldTable.length; i--;) {
UnpOldTable[i] = Table[i];
@ -348,20 +349,20 @@ function RarDecodeNumber(bstream, dec) {
var DecodeLen = dec.DecodeLen, DecodePos = dec.DecodePos, DecodeNum = dec.DecodeNum;
var bitField = bstream.getBits() & 0xfffe;
//some sort of rolled out binary search
var bits = ((bitField < DecodeLen[8])?
var bits = ((bitField < DecodeLen[8]) ?
((bitField < DecodeLen[4]) ?
((bitField < DecodeLen[2]) ?
((bitField < DecodeLen[1]) ? 1 : 2)
: ((bitField < DecodeLen[3]) ? 3 : 4))
: (bitField < DecodeLen[6])?
((bitField < DecodeLen[5]) ? 5 : 6)
:((bitField < DecodeLen[7]) ? 7 : 8))
: ((bitField < DecodeLen[3]) ? 3 : 4))
: (bitField < DecodeLen[6]) ?
((bitField < DecodeLen[5]) ? 5 : 6)
: ((bitField < DecodeLen[7]) ? 7 : 8))
: ((bitField < DecodeLen[12]) ?
((bitField < DecodeLen[10]) ?
((bitField < DecodeLen[9]) ? 9 : 10)
:((bitField < DecodeLen[11]) ? 11 : 12))
: (bitField < DecodeLen[14]) ?
((bitField < DecodeLen[13]) ? 13 : 14)
: ((bitField < DecodeLen[11]) ? 11 : 12))
: (bitField < DecodeLen[14]) ?
((bitField < DecodeLen[13]) ? 13 : 14)
: 15));
bstream.readBits(bits);
var N = DecodePos[bits] + ((bitField - DecodeLen[bits -1]) >>> (16 - bits));
@ -370,7 +371,7 @@ function RarDecodeNumber(bstream, dec) {
}
function RarMakeDecodeTables(BitLength, offset, dec, size) {
function rarMakeDecodeTables(BitLength, offset, dec, size) {
var DecodeLen = dec.DecodeLen, DecodePos = dec.DecodePos, DecodeNum = dec.DecodeNum;
var LenCount = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
TmpPos = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
@ -385,7 +386,7 @@ function RarMakeDecodeTables(BitLength, offset, dec, size) {
DecodeLen[0] = 0;
for (var I = 1; I < 16; ++I) {
N = 2 * (N+LenCount[I]);
N = 2 * (N + LenCount[I]);
M = (N << (15-I));
if (M > 0xFFFF)
M = 0xFFFF;
@ -393,9 +394,11 @@ function RarMakeDecodeTables(BitLength, offset, dec, size) {
DecodePos[I] = DecodePos[I-1] + LenCount[I-1];
TmpPos[I] = DecodePos[I];
}
for (I = 0; I < size; ++I)
if (BitLength[I + offset] != 0)
DecodeNum[ TmpPos[ BitLength[offset + I] & 0xF ]++] = I;
for (I = 0; I < size; ++I) {
if (BitLength[I + offset] != 0) {
DecodeNum[ TmpPos[ BitLength[offset + I] & 0xF ]++] = I;
}
}
}
// TODO: implement
@ -504,7 +507,7 @@ function RarReadTables20(bstream) {
TableSize = rNC20 + rDC20 + rRC20;
for (var I = 0; I < rBC20; I++)
BitLength[I] = bstream.readBits(4);
RarMakeDecodeTables(BitLength, 0, BD, rBC20);
rarMakeDecodeTables(BitLength, 0, BD, rBC20);
I = 0;
while (I < TableSize) {
var num = RarDecodeNumber(bstream, BD);
@ -528,9 +531,9 @@ function RarReadTables20(bstream) {
}
}
}
RarMakeDecodeTables(Table, 0, LD, rNC20);
RarMakeDecodeTables(Table, rNC20, DD, rDC20);
RarMakeDecodeTables(Table, rNC20 + rDC20, RD, rRC20);
rarMakeDecodeTables(Table, 0, LD, rNC20);
rarMakeDecodeTables(Table, rNC20, DD, rDC20);
rarMakeDecodeTables(Table, rNC20 + rDC20, RD, rRC20);
for (var i = UnpOldTable20.length; i--;) UnpOldTable20[i] = Table[i];
}
@ -596,7 +599,7 @@ function Unpack29(bstream, Solid) {
} else {
var LowDist = RarDecodeNumber(bstream, LDD);
if (LowDist === 16) {
lowDistRepCount = rLOW_DIST_REP_COUNT - 1;
lowDistRepCount = rLowDistRepCount - 1;
Distance += prevLowDist;
} else {
Distance += LowDist;
@ -774,8 +777,7 @@ var RarLocalFile = function(bstream) {
if (this.header.headType != FILE_HEAD && this.header.headType != ENDARC_HEAD) {
this.isValid = false;
info("Error! RAR Volume did not include a FILE_HEAD header ");
}
else {
} else {
// read in the compressed data
this.fileData = null;
if (this.header.packSize > 0) {
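
Note: the rar.js hunks above only rename RarMakeDecodeTables to rarMakeDecodeTables, rename the constants, and tidy loop bodies; the canonical-Huffman construction itself is unchanged. For orientation, here is a rough Python sketch of what that function computes, assuming the usual length-count prologue that the hunk does not show; the names mirror the JS and are illustrative only, not code from this commit.

def make_decode_tables(bit_lengths, offset, size):
    # Count how many codes exist for each bit length 1..15
    # (this prologue is not visible in the hunk above; assumed here).
    len_count = [0] * 16
    for i in range(size):
        len_count[bit_lengths[offset + i] & 0xF] += 1
    len_count[0] = 0

    decode_len = [0] * 16   # upper bound of the 16-bit field per code length
    decode_pos = [0] * 16   # first symbol index per code length
    tmp_pos = [0] * 16
    n = 0
    for i in range(1, 16):
        n = 2 * (n + len_count[i])
        decode_len[i] = min(n << (15 - i), 0xFFFF)
        decode_pos[i] = decode_pos[i - 1] + len_count[i - 1]
        tmp_pos[i] = decode_pos[i]

    # Symbols sorted by code length, as RarDecodeNumber expects.
    decode_num = [0] * size
    for i in range(size):
        length = bit_lengths[offset + i] & 0xF
        if length:
            decode_num[tmp_pos[length]] = i
            tmp_pos[length] += 1
    return decode_len, decode_pos, decode_num

def decode_number(bit_field, decode_len, decode_pos, decode_num):
    # bit_field is the next 16 bits of the stream with the lowest bit cleared,
    # i.e. bstream.getBits() & 0xfffe in the JS. Returns (symbol, bits_consumed);
    # the caller advances the stream by bits_consumed, as bstream.readBits(bits) does.
    bits = next((b for b in range(1, 16) if bit_field < decode_len[b]), 15)
    n = decode_pos[bits] + ((bit_field - decode_len[bits - 1]) >> (16 - bits))
    return decode_num[n], bits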

View File

@ -7,7 +7,7 @@
*
* TAR format: http://www.gnu.org/software/automake/manual/tar/Standard.html
*/
/* global bitjs, importScripts, Uint8Array */
/* global bitjs, importScripts, Uint8Array */
// This file expects to be invoked as a Worker (see onmessage below).
importScripts("io.js");
@ -72,9 +72,9 @@ var TarLocalFile = function(bstream) {
if (this.prefix.length) {
this.name = this.prefix + this.name;
}
bstream.readBytes(12); // 512 - 500
bstream.readBytes(12); // 512 - 500
} else {
bstream.readBytes(255); // 512 - 257
bstream.readBytes(255); // 512 - 257
}
// Done header, now rest of blocks are the file contents.
@ -88,7 +88,7 @@ var TarLocalFile = function(bstream) {
// A regular file.
if (this.typeflag === 0) {
info(" This is a regular file.");
var sizeInBytes = parseInt(this.size);
// var sizeInBytes = parseInt(this.size);
this.fileData = new Uint8Array(bstream.bytes.buffer, bstream.ptr, this.size);
if (this.name.length > 0 && this.size > 0 && this.fileData && this.fileData.buffer) {
this.isValid = true;
@ -100,9 +100,9 @@ var TarLocalFile = function(bstream) {
var remaining = 512 - this.size % 512;
if (remaining > 0 && remaining < 512) {
bstream.readBytes(remaining);
}
}
} else if (this.typeflag === 5) {
info(" This is a directory.")
info(" This is a directory.");
}
};
@ -127,7 +127,7 @@ var untar = function(arrayBuffer) {
if (oneLocalFile && oneLocalFile.isValid) {
localFiles.push(oneLocalFile);
totalUncompressedBytesInArchive += oneLocalFile.size;
}
}
}
totalFilesInArchive = localFiles.length;
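
The padding reads above rely on tar's fixed 512-byte block size: the 500-byte header is padded to 512, and each entry's data is padded to the next block boundary. A minimal illustrative sketch of that rule, not code from this commit:

TAR_BLOCK = 512

def padded_size(size):
    # Mirrors `remaining = 512 - this.size % 512` in TarLocalFile:
    # skip the zero padding that rounds the entry up to a block boundary.
    remaining = TAR_BLOCK - size % TAR_BLOCK
    return size + remaining if 0 < remaining < TAR_BLOCK else size

assert padded_size(500) == 512
assert padded_size(512) == 512
assert padded_size(513) == 1024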

View File

@ -119,8 +119,7 @@ ZipLocalFile.prototype.unzip = function() {
// version == 20, compression method == 8 (DEFLATE)
info("ZIP v2.0, DEFLATE: " + this.filename + " (" + this.compressedSize + " bytes)");
this.fileData = inflate(this.fileData, this.uncompressedSize);
}
else {
} else {
err("UNSUPPORTED VERSION/FORMAT: ZIP v" + this.version + ", compression method=" + this.compressionMethod + ": " + this.filename + " (" + this.compressedSize + " bytes)");
this.fileData = null;
}
@ -510,30 +509,30 @@ function inflate(compressedData, numDecompressedBytes) {
var buffer = new bitjs.io.ByteBuffer(numDecompressedBytes);
//var numBlocks = 0;
var blockSize = 0;
var bFinal;
// block format: http://tools.ietf.org/html/rfc1951#page-9
do {
var bFinal = bstream.readBits(1);
bFinal = bstream.readBits(1);
var bType = bstream.readBits(2);
blockSize = 0;
// ++numBlocks;
// no compression
if (bType === 0) {
// skip remaining bits in this byte
while (bstream.bitPtr != 0) bstream.readBits(1);
while (bstream.bitPtr !== 0) bstream.readBits(1);
var len = bstream.readBits(16),
nlen = bstream.readBits(16);
// nlen = bstream.readBits(16);
// TODO: check if nlen is the ones-complement of len?
if(len > 0) buffer.insertBytes(bstream.readBytes(len));
if (len > 0) buffer.insertBytes(bstream.readBytes(len));
blockSize = len;
}
// fixed Huffman codes
else if(bType === 1) {
else if (bType === 1) {
blockSize = inflateBlockData(bstream, getFixedLiteralTable(), getFixedDistanceTable(), buffer);
}
// dynamic Huffman codes
else if(bType === 2) {
else if (bType === 2) {
var numLiteralLengthCodes = bstream.readBits(5) + 257;
var numDistanceCodes = bstream.readBits(5) + 1,
numCodeLengthCodes = bstream.readBits(4) + 4;
@ -567,24 +566,25 @@ function inflate(compressedData, numDecompressedBytes) {
var prevCodeLength = 0;
while (literalCodeLengths.length < numLiteralLengthCodes + numDistanceCodes) {
var symbol = decodeSymbol(bstream, codeLengthsCodes);
var repeat;
if (symbol <= 15) {
literalCodeLengths.push(symbol);
prevCodeLength = symbol;
}
else if (symbol === 16) {
var repeat = bstream.readBits(2) + 3;
repeat = bstream.readBits(2) + 3;
while (repeat--) {
literalCodeLengths.push(prevCodeLength);
}
}
else if (symbol === 17) {
var repeat = bstream.readBits(3) + 3;
repeat = bstream.readBits(3) + 3;
while (repeat--) {
literalCodeLengths.push(0);
}
}
else if (symbol === 18) {
var repeat = bstream.readBits(7) + 11;
repeat = bstream.readBits(7) + 11;
while (repeat--) {
literalCodeLengths.push(0);
}

View File

@ -8,6 +8,10 @@
{% else %}
<h2>{{entries|length}} {{_('Results for:')}} {{searchterm}}</h2>
{%endif%}
{% if g.user.is_authenticated %}
<!--a href="{{ url_for('read_books') }}" class="btn btn-primary">{{ _('Search result to shelf') }} </a-->
{% endif %}
<div class="row">

View File

@ -14,22 +14,21 @@
<label for="Publisher">{{_('Publisher')}}</label>
<input type="text" class="form-control" name="publisher" id="publisher" value="">
</div>
<!--div class="form-group"-->
<div class="row">
<div class="form-group col-sm-6">
<label for="Publishstart">{{_('Publishing date from')}}</label>
<div style="position: relative">
<input type="date" class="form-control" name="Publishstart" id="Publishstart" value="">
<input type="text" class="form-control fake-input hidden" id="fake_Publishstart" value="">
</div>
</div>
<div class="form-group col-sm-6">
<label for="Publishend">{{_('Publishing date to')}}</label>
<div style="position: relative">
<input type="date" class="form-control" name="Publishend" id="Publishend" value="">
<input type="text" class="form-control fake-input hidden" id="fake_Publishend" value="">
</div>
</div>
<div class="row">
<div class="form-group col-sm-6">
<label for="Publishstart">{{_('Publishing date from')}}</label>
<div style="position: relative">
<input type="date" class="form-control" name="Publishstart" id="Publishstart" value="">
<input type="text" class="form-control fake-input hidden" id="fake_Publishstart" value="">
</div>
</div>
<div class="form-group col-sm-6">
<label for="Publishend">{{_('Publishing date to')}}</label>
<div style="position: relative">
<input type="date" class="form-control" name="Publishend" id="Publishend" value="">
<input type="text" class="form-control fake-input hidden" id="fake_Publishend" value="">
</div>
</div>
</div>
<label for="include_tag">{{_('Tags')}}</label>
<div class="form-group" id="test">
@ -71,7 +70,7 @@
{% endfor %}
</div>
</div>
{% if languages %}
{% if languages %}
<label for="include_language">{{_('Languages')}}</label>
<div class="form-group">
<div class="btn-toolbar btn-toolbar-lg" data-toggle="buttons">
@ -92,7 +91,65 @@
{% endfor %}
</div>
</div>
{% endif%}
{% endif%}
<div class="row">
<div class="form-group col-sm-6">
<label for="ratinghigh">{{_('Rating bigger than')}}</label>
<input type="number" name="ratinghigh" id="ratinghigh" class="rating input-lg" data-clearable="" >
</div>
<div class="form-group col-sm-6">
<label for="ratinglow">{{_('Rating less than')}}</label>
<input type="number" name="ratinglow" id="ratinglow" class="rating input-lg" data-clearable="" >
</div>
</div>
<div class="form-group">
<label for="comment">{{_('Description')}}</label>
<input type="text" class="form-control" name="comment" id="comment" value="">
</div>
{% if cc|length > 0 %}
{% for c in cc %}
<div class="form-group">
<label for="{{ 'custom_column_' ~ c.id }}">{{ c.name }}</label>
{% if c.datatype == 'bool' %}
<select name="{{ 'custom_column_' ~ c.id }}" id="{{ 'custom_column_' ~ c.id }}" class="form-control">
<option value="" selected></option>
<option value="True" >{{_('Yes')}}</option>
<option value="False" >{{_('No')}}</option>
</select>
{% endif %}
{% if c.datatype == 'int' %}
<input type="number" step="1" class="form-control" name="{{ 'custom_column_' ~ c.id }}" id="{{ 'custom_column_' ~ c.id }}" value="">
{% endif %}
{% if c.datatype in ['text', 'series'] and not c.is_multiple %}
<input type="text" class="form-control" name="{{ 'custom_column_' ~ c.id }}" id="{{ 'custom_column_' ~ c.id }}" value="">
{% endif %}
{% if c.datatype in ['text', 'series'] and c.is_multiple %}
<input type="text" class="form-control" name="{{ 'custom_column_' ~ c.id }}" id="{{ 'custom_column_' ~ c.id }}" value="">
{% endif %}
{% if c.datatype == 'enumeration' %}
<select class="form-control" name="{{ 'custom_column_' ~ c.id }}" id="{{ 'custom_column_' ~ c.id }}">
<option></option>
{% for opt in c.get_display_dict().enum_values %}
<option>{{ opt }}</option>
{% endfor %}
</select>
{% endif %}
{% if c.datatype == 'rating' %}
<input type="number" min="1" max="5" step="1" class="form-control" name="{{ 'custom_column_' ~ c.id }}" id="{{ 'custom_column_' ~ c.id }}"
{% if book['custom_column_' ~ c.id]|length > 0 %}
value="{{ '%d' % (book['custom_column_' ~ c.id][0].value / 2) }}"
{% endif %}>
{% endif %}
</div>
{% endfor %}
{% endif %}
<button type="submit" class="btn btn-default">{{_('Submit')}}</button>
</form>
</div>
@ -107,6 +164,7 @@
{% if not g.user.locale == 'en' %}
<script src="{{ url_for('static', filename='js/libs/bootstrap-datepicker/locales/bootstrap-datepicker.' + g.user.locale + '.min.js') }}" charset="UTF-8"></script>
{% endif %}
<script src="{{ url_for('static', filename='js/libs/bootstrap-rating-input.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/libs/typeahead.bundle.js') }}"></script>
<script src="{{ url_for('static', filename='js/edit_books.js') }}"></script>
<script>
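
For orientation: Calibre stores ratings on a 0-10 scale while the bootstrap-rating-input widget works in whole stars (1-5), which is why the custom rating column above is pre-filled with value / 2. A hypothetical helper pair, purely illustrative and not part of this commit:

def stars_to_calibre(stars):
    # 1-5 stars in the form -> 2-10 in the Calibre database
    return int(stars) * 2

def calibre_to_stars(value):
    # 0-10 in the Calibre database -> 0-5 stars shown in the form
    return int(value) // 2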

View File

@ -914,32 +914,6 @@ def get_email_status_json():
if 'starttime' not in task:
task['starttime'] = ""
answer = tasks
'''answer.append({'user': 'Test', 'starttime': '07.3.2018 15:23', 'progress': " 0 %", 'type': 'E-Mail',
'runtime': '0 s', 'rt': 0, 'status': _('Waiting'),'id':1 })
answer.append({'user': 'Admin', 'starttime': '07.3.2018 15:33', 'progress': " 11 %", 'type': 'E-Mail',
'runtime': '2 s', 'rt':2, 'status': _('Waiting'),'id':2})
answer.append({'user': 'Nanny', 'starttime': '8.3.2018 15:23', 'progress': " 2 %", 'type': 'E-Mail',
'runtime': '32 s','rt':32, 'status': _('Waiting'),'id':3})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '7 s','rt':7, 'status': _('Waiting'),'id':4})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '22 s','rt':22, 'status': _('Waiting'),'id':5})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '17 s','rt':17, 'status': _('Waiting'),'id':6})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '72 s','rt':72, 'status': _('Waiting'),'id':7})
answer.append({'user': 'Guest', 'starttime': '19.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '1:07 s','rt':67, 'status': _('Waiting'),'id':8})
answer.append({'user': 'Guest', 'starttime': '18.2.2018 12:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '2:07 s','rt':127, 'status': _('Waiting'),'id':9})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '27 s','rt':27, 'status': _('Waiting'),'id':10})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 16:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '73 s','rt':73, 'status': _('Waiting'),'id':11})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '71 s','rt':71, 'status': _('Waiting'),'id':12})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 17:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '27 s','rt':27, 'status': _('Waiting'),'id':13})'''
js=json.dumps(answer)
response = make_response(js)
response.headers["Content-Type"] = "application/json; charset=utf-8"
@ -1364,7 +1338,6 @@ def toggle_read(book_id):
book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
read_status = getattr(book, 'custom_column_' + str(config.config_read_column))
if len(read_status):
#setattr(getattr(book,'custom_column_' + str(cc_id), 'value', (not read_status))
read_status[0].value = not read_status[0].value
db.session.commit()
else:
@ -1722,89 +1695,140 @@ def search():
return render_title_template('search.html', searchterm="", page="search")
@app.route("/advanced_search", methods=["GET"])
@app.route("/advanced_search", methods=['GET'])
@login_required_if_no_ano
def advanced_search():
if request.method == 'GET':
db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
q = db.session.query(db.Books)
include_tag_inputs = request.args.getlist('include_tag')
exclude_tag_inputs = request.args.getlist('exclude_tag')
include_series_inputs = request.args.getlist('include_serie')
exclude_series_inputs = request.args.getlist('exclude_serie')
include_languages_inputs = request.args.getlist('include_language')
exclude_languages_inputs = request.args.getlist('exclude_language')
# Build custom columns names
tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
if config.config_columns_to_ignore:
cc = []
for col in tmpcc:
r = re.compile(config.config_columns_to_ignore)
if r.match(col.label):
cc.append(col)
else:
cc = tmpcc
author_name = request.args.get("author_name")
book_title = request.args.get("book_title")
publisher = request.args.get("publisher")
pub_start = request.args.get("Publishstart")
pub_end = request.args.get("Publishend")
if author_name: author_name = author_name.strip().lower().replace(',','|')
if book_title: book_title = book_title.strip().lower()
if publisher: publisher = publisher.strip().lower()
if include_tag_inputs or exclude_tag_inputs or include_series_inputs or exclude_series_inputs or \
include_languages_inputs or exclude_languages_inputs or author_name or book_title or \
publisher or pub_start or pub_end:
searchterm = []
searchterm.extend((author_name.replace('|',','), book_title, publisher))
if pub_start:
try:
searchterm.extend([_(u"Published after %s" %
format_date(datetime.datetime.strptime(pub_start,"%Y-%m-%d"),
format='medium', locale=get_locale()))])
except ValueError:
pub_start = u""
if pub_end:
try:
searchterm.extend([_(u"Published before ") +
format_date(datetime.datetime.strptime(pub_end,"%Y-%m-%d"),
format='medium', locale=get_locale())])
except ValueError:
pub_start = u""
tag_names = db.session.query(db.Tags).filter(db.Tags.id.in_(include_tag_inputs)).all()
searchterm.extend(tag.name for tag in tag_names)
# searchterm = " + ".join(filter(None, searchterm))
serie_names = db.session.query(db.Series).filter(db.Series.id.in_(include_series_inputs)).all()
searchterm.extend(serie.name for serie in serie_names)
language_names = db.session.query(db.Languages).filter(db.Languages.id.in_(include_languages_inputs)).all()
for lang in language_names:
try:
cur_l = LC.parse(lang.lang_code)
lang.name = cur_l.get_language_name(get_locale())
except Exception:
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
searchterm.extend(language.name for language in language_names)
searchterm = " + ".join(filter(None, searchterm))
q = q.filter()
if author_name:
q = q.filter(db.Books.authors.any(db.Authors.name.ilike("%" + author_name + "%")))
if book_title:
q = q.filter(db.Books.title.ilike("%" + book_title + "%"))
if pub_start:
q = q.filter(db.Books.pubdate >= pub_start)
if pub_end:
q = q.filter(db.Books.pubdate <= pub_end)
if publisher:
q = q.filter(db.Books.publishers.any(db.Publishers.name.ilike("%" + publisher + "%")))
for tag in include_tag_inputs:
q = q.filter(db.Books.tags.any(db.Tags.id == tag))
for tag in exclude_tag_inputs:
q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
for serie in include_series_inputs:
q = q.filter(db.Books.series.any(db.Series.id == serie))
for serie in exclude_series_inputs:
q = q.filter(not_(db.Books.series.any(db.Series.id == serie)))
if current_user.filter_language() != "all":
q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()))
else:
for language in include_languages_inputs:
q = q.filter(db.Books.languages.any(db.Languages.id == language))
for language in exclude_languages_inputs:
q = q.filter(not_(db.Books.series.any(db.Languages.id == language)))
q = q.all()
return render_title_template('search.html', searchterm=searchterm,
entries=q, title=_(u"search"), page="search")
db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
q = db.session.query(db.Books)
# postargs = request.form.to_dict()
include_tag_inputs = request.args.getlist('include_tag')
exclude_tag_inputs = request.args.getlist('exclude_tag')
include_series_inputs = request.args.getlist('include_serie')
exclude_series_inputs = request.args.getlist('exclude_serie')
include_languages_inputs = request.args.getlist('include_language')
exclude_languages_inputs = request.args.getlist('exclude_language')
author_name = request.args.get("author_name")
book_title = request.args.get("book_title")
publisher = request.args.get("publisher")
pub_start = request.args.get("Publishstart")
pub_end = request.args.get("Publishend")
rating_low = request.args.get("ratinghigh")
rating_high = request.args.get("ratinglow")
description = request.args.get("comment")
if author_name: author_name = author_name.strip().lower().replace(',','|')
if book_title: book_title = book_title.strip().lower()
if publisher: publisher = publisher.strip().lower()
searchterm = []
cc_present = False
for c in cc:
if request.args.get('custom_column_' + str(c.id)):
searchterm.extend([_(u"%s: %s" % (c.name, request.args.get('custom_column_' + str(c.id))))])
cc_present = True
if include_tag_inputs or exclude_tag_inputs or include_series_inputs or exclude_series_inputs or \
include_languages_inputs or exclude_languages_inputs or author_name or book_title or \
publisher or pub_start or pub_end or rating_low or rating_high or description or cc_present:
searchterm = []
searchterm.extend((author_name.replace('|',','), book_title, publisher))
if pub_start:
try:
searchterm.extend([_(u"Published after %s" %
format_date(datetime.datetime.strptime(pub_start,"%Y-%m-%d"),
format='medium', locale=get_locale()))])
except ValueError:
pub_start = u""
if pub_end:
try:
searchterm.extend([_(u"Published before ") +
format_date(datetime.datetime.strptime(pub_end,"%Y-%m-%d"),
format='medium', locale=get_locale())])
except ValueError:
pub_start = u""
tag_names = db.session.query(db.Tags).filter(db.Tags.id.in_(include_tag_inputs)).all()
searchterm.extend(tag.name for tag in tag_names)
serie_names = db.session.query(db.Series).filter(db.Series.id.in_(include_series_inputs)).all()
searchterm.extend(serie.name for serie in serie_names)
language_names = db.session.query(db.Languages).filter(db.Languages.id.in_(include_languages_inputs)).all()
for lang in language_names:
try:
cur_l = LC.parse(lang.lang_code)
lang.name = cur_l.get_language_name(get_locale())
except Exception:
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
searchterm.extend(language.name for language in language_names)
if rating_high:
searchterm.extend([_(u"Rating <= %s" % rating_high)])
if rating_low:
searchterm.extend([_(u"Rating >= %s" % rating_low)])
# handle custom columns
for c in cc:
if request.args.get('custom_column_' + str(c.id)):
searchterm.extend([_(u"%s: %s" % (c.name, request.args.get('custom_column_' + str(c.id))))])
searchterm = " + ".join(filter(None, searchterm))
q = q.filter()
if author_name:
q = q.filter(db.Books.authors.any(db.Authors.name.ilike("%" + author_name + "%")))
if book_title:
q = q.filter(db.Books.title.ilike("%" + book_title + "%"))
if pub_start:
q = q.filter(db.Books.pubdate >= pub_start)
if pub_end:
q = q.filter(db.Books.pubdate <= pub_end)
if publisher:
q = q.filter(db.Books.publishers.any(db.Publishers.name.ilike("%" + publisher + "%")))
for tag in include_tag_inputs:
q = q.filter(db.Books.tags.any(db.Tags.id == tag))
for tag in exclude_tag_inputs:
q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
for serie in include_series_inputs:
q = q.filter(db.Books.series.any(db.Series.id == serie))
for serie in exclude_series_inputs:
q = q.filter(not_(db.Books.series.any(db.Series.id == serie)))
if current_user.filter_language() != "all":
q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()))
else:
for language in include_languages_inputs:
q = q.filter(db.Books.languages.any(db.Languages.id == language))
for language in exclude_languages_inputs:
q = q.filter(not_(db.Books.series.any(db.Languages.id == language)))
if rating_high:
q = q.filter(db.Books.ratings.any(db.Ratings.id <= rating_high))
if rating_low:
q = q.filter(db.Books.ratings.any(db.Ratings.id >= rating_low))
if description:
q = q.filter(db.Books.comments.any(db.Comments.text.ilike("%" + description + "%")))
# search custom columns
for c in cc:
custom_query = request.args.get('custom_column_' + str(c.id))
if custom_query:
if c.datatype == 'bool':
getattr(db.Books, 'custom_column_1')
q = q.filter(getattr(db.Books, 'custom_column_'+str(c.id)).any(
db.cc_classes[c.id].value == (custom_query== "True") ))
elif c.datatype == 'int':
q = q.filter(getattr(db.Books, 'custom_column_'+str(c.id)).any(
db.cc_classes[c.id].value == custom_query ))
else:
q = q.filter(getattr(db.Books, 'custom_column_'+str(c.id)).any(
db.cc_classes[c.id].value.ilike("%" + custom_query + "%")))
q = q.all()
return render_title_template('search.html', searchterm=searchterm,
entries=q, title=_(u"search"), page="search")
tags = db.session.query(db.Tags).order_by(db.Tags.name).all()
series = db.session.query(db.Series).order_by(db.Series.name).all()
if current_user.filter_language() == u"all":
@ -1817,8 +1841,9 @@ def advanced_search():
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
else:
languages = None
return render_title_template('search_form.html', tags=tags, languages=languages,
series=series, title=_(u"search"), page="advsearch")
series=series, title=_(u"search"), cc=cc, page="advsearch")
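
The per-datatype custom column filters are the heart of the new route. Restated as a standalone helper, purely as an illustrative sketch of the same logic (books stands for db.Books, cc_classes for db.cc_classes, args for request.args; the commit keeps this inline rather than in a helper):

def apply_custom_column_filters(q, cc, args, books, cc_classes):
    for c in cc:
        term = args.get('custom_column_' + str(c.id))
        if not term:
            continue
        relation = getattr(books, 'custom_column_' + str(c.id))
        value = cc_classes[c.id].value
        if c.datatype == 'bool':
            # the bool <select> submits the literal strings "True" / "False"
            q = q.filter(relation.any(value == (term == "True")))
        elif c.datatype == 'int':
            q = q.filter(relation.any(value == term))
        else:
            # text, series, enumeration, rating: case-insensitive substring match
            q = q.filter(relation.any(value.ilike("%" + term + "%")))
    return q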

View File

@ -360,7 +360,7 @@ class WorkerThread(threading.Thread):
self.UIqueue[self.current]['message'] = error_message
return # error_message, RET_FAIL
def add_convert(self, file_path, bookid, user_name, type, settings, kindle_mail):
def add_convert(self, file_path, bookid, user_name, typ, settings, kindle_mail):
addLock = threading.Lock()
addLock.acquire()
if self.last >= 20:
@ -369,7 +369,7 @@ class WorkerThread(threading.Thread):
self.id += 1
self.queue.append({'file_path':file_path, 'bookid':bookid, 'starttime': 0, 'kindle':kindle_mail,
'status': STAT_WAITING, 'typ': TASK_CONVERT, 'settings':settings})
self.UIqueue.append({'user': user_name, 'formStarttime': '', 'progress': " 0 %", 'type': type,
self.UIqueue.append({'user': user_name, 'formStarttime': '', 'progress': " 0 %", 'type': typ,
'runtime': '0 s', 'status': _('Waiting'),'id': self.id } )
self.id += 1