"""
EclipseSQLLib

Created by
Emanuel Borges
05.30.2018

A proper class for automating Eclipse through SQL.
"""

import pyodbc
import SQLBlobs
class Eclipse_Connection(object):
    """An Advanced Eclipse Connection Library and Toolkit"""

    version = '0.7.0'

    # SQL-authentication credentials. Only used by the commented-out
    # connection-string variant in _GetNewCnxn; normal operation uses a
    # trusted (Windows) connection.
    sqlUserName = "eborges"

    ## Only add when compiling!!!
    # NOTE(review): keep the password blank in source control and inject
    # the real value only at build/packaging time.
    sqlPassword = ''
def __init__(self):
|
| 29 |
|
|
self.caseMatrix = None
|
| 30 |
|
|
self.caseList = None
|
| 31 |
|
|
self.fieldMatrix = None
|
| 32 |
|
|
self.fieldList = None
|
| 33 |
|
|
self.currentDatabaseName = None
|
| 34 |
|
|
self.batchSetList = None
|
| 35 |
|
|
self.tagGroupMatrix = None
|
| 36 |
|
|
self.tagNameMatrix = None
|
| 37 |
|
|
self.tagGroupToTagNameMatrix = None
|
| 38 |
|
|
self._UpdateCaseMatrix()
|
| 39 |
|
|
print "\nEclipse SQL Library Initialized.\n\nCreated by Emanuel Borges\nVersion %s\n\n"% self.version
|
| 40 |
nino.borges |
671 |
|
| 41 |
|
|
def _GetNewCnxn(self, internalCaseName):
|
| 42 |
|
|
"""A simple funtion, not method and does not update any instance vars, to grab a new cnxn connection. Just to have one place to keep this line updated"""
|
| 43 |
|
|
cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;Trusted_Connection=yes;"%internalCaseName)
|
| 44 |
|
|
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;UID=%s;PWD=%s;Trusted_Connection=NO;"%(internalCaseName,self.sqlUserName, self.sqlPassword))
|
| 45 |
|
|
return cnxn
|
| 46 |
nino.borges |
650 |
|
| 47 |
|
|
def _UpdateCaseMatrix(self):
|
| 48 |
|
|
"""Will normally run only once when initiated, which populates the case Matrix and the sorted case list."""
|
| 49 |
|
|
matrix = {}
|
| 50 |
|
|
|
| 51 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=AddConfig;""Trusted_Connection=yes;")
|
| 52 |
|
|
cnxn = self._GetNewCnxn('AddConfig')
|
| 53 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 54 |
|
|
|
| 55 |
|
|
cursor.execute('select name,databasename from Enterprise.CaseProductEnvironment where ProductId=3')
|
| 56 |
|
|
|
| 57 |
|
|
for row in cursor:
|
| 58 |
|
|
matrix[row.name] = row.databasename
|
| 59 |
|
|
|
| 60 |
|
|
|
| 61 |
|
|
self.caseMatrix = matrix
|
| 62 |
|
|
self.caseList = self.caseMatrix.keys()
|
| 63 |
|
|
self.caseList.sort()
|
| 64 |
|
|
cnxn.close()
|
| 65 |
|
|
|
| 66 |
|
|
def _UpdateBatchSetList(self):
|
| 67 |
|
|
"""Will update the list of batch set names that exist in a db"""
|
| 68 |
|
|
batchSetList = []
|
| 69 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 70 |
|
|
|
| 71 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 72 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 73 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 74 |
|
|
|
| 75 |
|
|
cursor.execute('SELECT ReviewPassName FROM Review.ReviewPass ORDER BY ReviewPassName')
|
| 76 |
|
|
|
| 77 |
|
|
for row in cursor:
|
| 78 |
|
|
batchSetList.append(row.ReviewPassName)
|
| 79 |
|
|
|
| 80 |
|
|
if batchSetList:
|
| 81 |
|
|
batchSetList.sort()
|
| 82 |
|
|
self.batchSetList = batchSetList
|
| 83 |
|
|
else:
|
| 84 |
|
|
self.batchSetList = None
|
| 85 |
|
|
|
| 86 |
|
|
cnxn.close()
|
| 87 |
|
|
|
| 88 |
|
|
|
| 89 |
|
|
def _UpdateTagGroupNameMatrixes(self):
|
| 90 |
|
|
"""Updates three tag matrixes, tagGroup with id, tagNames with id and the tagGroup to tagName"""
|
| 91 |
|
|
tagGroupMatrix = {}
|
| 92 |
|
|
tagNameMatrix = {}
|
| 93 |
|
|
tagGroupToTagNameMatrix = {}
|
| 94 |
|
|
|
| 95 |
|
|
|
| 96 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 97 |
|
|
|
| 98 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 99 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 100 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 101 |
|
|
|
| 102 |
|
|
cursor.execute("SELECT TagGroups.TagGroupName, Tags.TagName, TagGroupsDetail.TagGroupId, TagGroupsDetail.TagId FROM TagGroups INNER JOIN TagGroupsDetail ON TagGroups.TagGroupId = TagGroupsDetail.TagGroupId INNER JOIN Tags ON TagGroupsDetail.TagId = Tags.TagId")
|
| 103 |
|
|
|
| 104 |
|
|
for row in cursor:
|
| 105 |
|
|
tagGroupMatrix[row.TagGroupName] = row.TagGroupId
|
| 106 |
|
|
tagNameMatrix[row.TagName] = row.TagId
|
| 107 |
|
|
if row.TagGroupName in tagGroupToTagNameMatrix.keys():
|
| 108 |
|
|
tagGroupToTagNameMatrix[row.TagGroupName].append(row.TagName)
|
| 109 |
|
|
else:
|
| 110 |
|
|
tagGroupToTagNameMatrix[row.TagGroupName] = [row.TagName,]
|
| 111 |
|
|
|
| 112 |
|
|
cnxn.close()
|
| 113 |
|
|
|
| 114 |
|
|
self.tagGroupMatrix = tagGroupMatrix
|
| 115 |
|
|
self.tagNameMatrix = tagNameMatrix
|
| 116 |
|
|
self.tagGroupToTagNameMatrix = tagGroupToTagNameMatrix
|
| 117 |
|
|
|
| 118 |
|
|
|
| 119 |
|
|
def _QueryByTag(self, tagName):
|
| 120 |
|
|
"""Performs a query by tag and returns a connection and cursor object. Dont forget to close both! For now this is private"""
|
| 121 |
|
|
|
| 122 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 123 |
|
|
|
| 124 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 125 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 126 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 127 |
|
|
|
| 128 |
|
|
cursor.execute("SELECT Tags.TagName, Documents.DocumentKey FROM TagGroupsDetail INNER JOIN TagGroups ON TagGroupsDetail.TagGroupId = TagGroups.TagGroupId INNER JOIN Tags ON TagGroupsDetail.TagId = Tags.TagId INNER JOIN DocumentTags ON Tags.TagId = DocumentTags.TagId INNER JOIN Documents ON DocumentTags.DocId = Documents.DocId WHERE (Tags.TagName = '%s') ORDER BY DocumentKey"% tagName)
|
| 129 |
|
|
|
| 130 |
|
|
return cnxn, cursor
|
| 131 |
|
|
|
| 132 |
|
|
|
| 133 |
|
|
def _ProcessSQLBlob(self, blobName, cursor):
|
| 134 |
|
|
"""Internal method only. Will take a blobname, find it in the SQLBlob class and process it. There MUST be a GO a the end for this to work."""
|
| 135 |
|
|
sqlQuery = ""
|
| 136 |
|
|
rawBlob = SQLBlobs.BlobListMatrix.matrix[blobName]
|
| 137 |
|
|
|
| 138 |
|
|
for i in rawBlob:
|
| 139 |
|
|
if i == 'GO\n':
|
| 140 |
|
|
cursor.execute(sqlQuery)
|
| 141 |
|
|
sqlQuery = ""
|
| 142 |
|
|
#elif 'PRINT' in i:
|
| 143 |
|
|
# display = i.split("'")[1]
|
| 144 |
|
|
# print(display, '\r')
|
| 145 |
|
|
else:
|
| 146 |
|
|
sqlQuery = sqlQuery + i
|
| 147 |
|
|
|
| 148 |
|
|
|
| 149 |
|
|
def SelectCase(self, caseName):
|
| 150 |
|
|
"""Sets the current database name and updates the fieldMatrix and the sorted field list and updates the batchsetlist and the tagMatrixes too"""
|
| 151 |
|
|
self.currentDatabaseName = caseName
|
| 152 |
|
|
|
| 153 |
|
|
matrix = {}
|
| 154 |
|
|
|
| 155 |
|
|
internalCaseName = self.caseMatrix[caseName]
|
| 156 |
|
|
|
| 157 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 158 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 159 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 160 |
|
|
|
| 161 |
|
|
cursor.execute('SELECT TableName, ColumnName, FieldName FROM vFieldDefinition')
|
| 162 |
|
|
|
| 163 |
|
|
for row in cursor:
|
| 164 |
|
|
matrix[row.FieldName] = (row.TableName,row.ColumnName)
|
| 165 |
|
|
|
| 166 |
|
|
self.fieldMatrix = matrix
|
| 167 |
|
|
self.fieldList = self.fieldMatrix.keys()
|
| 168 |
|
|
self.fieldList.sort()
|
| 169 |
|
|
|
| 170 |
|
|
cnxn.close()
|
| 171 |
|
|
self._UpdateBatchSetList()
|
| 172 |
|
|
self._UpdateTagGroupNameMatrixes()
|
| 173 |
|
|
|
| 174 |
|
|
|
| 175 |
|
|
def GetUniqueValues(self, fieldName):
|
| 176 |
|
|
"""Returns a list of unique values in field."""
|
| 177 |
|
|
uniqueValueList = []
|
| 178 |
|
|
|
| 179 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 180 |
|
|
|
| 181 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 182 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 183 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 184 |
|
|
|
| 185 |
|
|
internalTableName, internalFieldName = self.fieldMatrix[fieldName]
|
| 186 |
|
|
|
| 187 |
|
|
cursor.execute('Select DISTINCT %s FROM %s'% (internalFieldName, internalTableName))
|
| 188 |
|
|
|
| 189 |
|
|
for row in cursor:
|
| 190 |
|
|
#print row.Field000008
|
| 191 |
|
|
uniqueValueList.append(getattr(row,internalFieldName))
|
| 192 |
|
|
|
| 193 |
|
|
uniqueValueList.sort()
|
| 194 |
|
|
cnxn.close()
|
| 195 |
|
|
|
| 196 |
|
|
return uniqueValueList
|
| 197 |
|
|
|
| 198 |
|
|
def GetAnalyticsCategorySetsValues(self):
|
| 199 |
|
|
"""Returns a matrix of the values that a case has in all of the cat sets"""
|
| 200 |
|
|
matrix = {}
|
| 201 |
|
|
|
| 202 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 203 |
|
|
|
| 204 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 205 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 206 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 207 |
|
|
|
| 208 |
|
|
cursor.execute('Select top 10000 b.CategoryName, c.DocumentKey, a.IsEntireDocument, a.DocumentText FROM Analytics.CategoryExemplarDocument a left join [Analytics].[Category] b on b.CategoryId=a.CategoryId left join dbo.documents c on c.docid=a.docid order by 2 asc')
|
| 209 |
|
|
|
| 210 |
|
|
for row in cursor:
|
| 211 |
|
|
if row.IsEntireDocument:
|
| 212 |
|
|
docText = "FULL DOCUMENT"
|
| 213 |
|
|
else:
|
| 214 |
|
|
docText = row.DocumentText
|
| 215 |
|
|
if row.DocumentKey:
|
| 216 |
|
|
#docKey = "test"
|
| 217 |
|
|
docKey = row.DocumentKey
|
| 218 |
|
|
else:
|
| 219 |
|
|
docKey = "No Document"
|
| 220 |
|
|
if row.CategoryName in matrix.keys():
|
| 221 |
|
|
matrix[row.CategoryName].append((docKey,docText))
|
| 222 |
|
|
else:
|
| 223 |
|
|
matrix[row.CategoryName] = [(docKey,docText),]
|
| 224 |
|
|
|
| 225 |
|
|
cnxn.close()
|
| 226 |
|
|
|
| 227 |
|
|
return matrix
|
| 228 |
|
|
|
| 229 |
|
|
|
| 230 |
|
|
def GetProductionSize(self, prodVolFieldName, natFileSizeFieldName, volumeValue):
|
| 231 |
|
|
"""Returns the total production size, in GB, for a production with the volume of the voumeValue you give it. It rounds the number and, if you are under .01 it will just make it .01. Finally also returns 0.0 if the value was empty."""
|
| 232 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 233 |
|
|
|
| 234 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 235 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 236 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 237 |
|
|
|
| 238 |
|
|
internalProdVolTableName, internalProdVolFieldName = self.fieldMatrix[prodVolFieldName]
|
| 239 |
|
|
internalNatFileSizeTableName, internalNatFileSizeFieldName = self.fieldMatrix[natFileSizeFieldName]
|
| 240 |
|
|
|
| 241 |
|
|
if internalProdVolTableName == internalNatFileSizeTableName:
|
| 242 |
|
|
## No join needed
|
| 243 |
|
|
cursor.execute("Select %s, %s FROM %s WHERE %s LIKE '%s'"% (internalProdVolFieldName, internalNatFileSizeFieldName, internalProdVolTableName, internalProdVolFieldName,volumeValue))
|
| 244 |
|
|
else:
|
| 245 |
|
|
cursor.execute("Select %s.%s, %s.%s FROM %s INNER JOIN %s ON %s.DocId = %s.DocId WHERE %s LIKE '%s'"% (internalProdVolTableName,internalProdVolFieldName,internalNatFileSizeTableName, internalNatFileSizeFieldName, internalProdVolTableName,internalNatFileSizeTableName,internalProdVolTableName,internalNatFileSizeTableName,internalProdVolFieldName,volumeValue))
|
| 246 |
|
|
|
| 247 |
|
|
count = 0
|
| 248 |
|
|
|
| 249 |
|
|
for row in cursor:
|
| 250 |
|
|
docSize = getattr(row,internalNatFileSizeFieldName)
|
| 251 |
|
|
if docSize:
|
| 252 |
|
|
count = count + docSize
|
| 253 |
|
|
|
| 254 |
|
|
|
| 255 |
|
|
count2 = count / 1024
|
| 256 |
|
|
count3 = count2 / 1024
|
| 257 |
|
|
cnxn.close()
|
| 258 |
|
|
finalCount = round(count3,2)
|
| 259 |
|
|
if count3:
|
| 260 |
|
|
if finalCount < 0.01:
|
| 261 |
|
|
finalCount = 0.01
|
| 262 |
|
|
print count3,finalCount
|
| 263 |
|
|
return finalCount
|
| 264 |
|
|
|
| 265 |
|
|
|
| 266 |
|
|
def UpdateEndDocByTag(self, tagName, endDocField):
|
| 267 |
|
|
"""Updates the endDocField, filtering by a tag, but only the null value ones, by copying from the evidoxID field"""
|
| 268 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 269 |
|
|
|
| 270 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 271 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 272 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 273 |
|
|
|
| 274 |
|
|
cursor.execute("SELECT Tags.TagName, Documents.DocumentKey FROM TagGroupsDetail INNER JOIN TagGroups ON TagGroupsDetail.TagGroupId = TagGroups.TagGroupId INNER JOIN Tags ON TagGroupsDetail.TagId = Tags.TagId INNER JOIN DocumentTags ON Tags.TagId = DocumentTags.TagId INNER JOIN Documents ON DocumentTags.DocId = Documents.DocId WHERE (Tags.TagName = '%s') ORDER BY DocumentKey"% tagName)
|
| 275 |
|
|
|
| 276 |
|
|
|
| 277 |
|
|
|
| 278 |
|
|
def UpdateBatchReviewedStatus(self, batchSetName, queryOnly = True):
|
| 279 |
|
|
"""Updates the batch reviewed status flag for a particular batch set by name. default is to query only, which wont actually do any updates and only return the count"""
|
| 280 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 281 |
|
|
|
| 282 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 283 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 284 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 285 |
|
|
|
| 286 |
|
|
|
| 287 |
|
|
if queryOnly:
|
| 288 |
|
|
print "running test only"
|
| 289 |
|
|
cursor.execute("SELECT Review.BatchDocument.ReviewStatusId, Tags.TagName, Review.ReviewPass.ReviewPassName FROM TagGroupsDetail INNER JOIN TagGroups ON TagGroupsDetail.TagGroupId = TagGroups.TagGroupId INNER JOIN Review.BatchDocument INNER JOIN DocumentTags ON Review.BatchDocument.DocId = DocumentTags.DocId INNER JOIN DocumentFields_0002 ON Review.BatchDocument.DocId = DocumentFields_0002.DocId ON TagGroupsDetail.TagId = DocumentTags.TagId INNER JOIN Tags ON TagGroupsDetail.TagId = Tags.TagId AND DocumentTags.TagId = Tags.TagId INNER JOIN Review.Batch ON Review.BatchDocument.BatchId = Review.Batch.BatchId AND Review.BatchDocument.BatchId = Review.Batch.BatchId INNER JOIN Review.ReviewPass ON Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId AND Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId AND Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId AND Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId AND Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId WHERE (Review.BatchDocument.ReviewStatusId = '0') AND (TagGroups.TagGroupName = N'Doc Review Designation') AND (Tags.TagName IS NOT NULL) AND (Review.ReviewPass.ReviewPassName = N'%s')"% batchSetName)
|
| 290 |
|
|
toBeUpdatedCount = len(cursor.fetchall())
|
| 291 |
|
|
else:
|
| 292 |
|
|
print "performing the update"
|
| 293 |
|
|
cursor.execute("UPDATE Review.BatchDocument SET ReviewStatusId = '1' FROM TagGroupsDetail INNER JOIN TagGroups ON TagGroupsDetail.TagGroupId = TagGroups.TagGroupId INNER JOIN Review.BatchDocument INNER JOIN DocumentTags ON Review.BatchDocument.DocId = DocumentTags.DocId INNER JOIN DocumentFields_0002 ON Review.BatchDocument.DocId = DocumentFields_0002.DocId ON TagGroupsDetail.TagId = DocumentTags.TagId INNER JOIN Tags ON TagGroupsDetail.TagId = Tags.TagId AND DocumentTags.TagId = Tags.TagId INNER JOIN Review.Batch ON Review.BatchDocument.BatchId = Review.Batch.BatchId AND Review.BatchDocument.BatchId = Review.Batch.BatchId INNER JOIN Review.ReviewPass ON Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId AND Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId AND Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId AND Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId AND Review.Batch.ReviewPassId = Review.ReviewPass.ReviewPassId WHERE (Review.BatchDocument.ReviewStatusId = '0') AND (TagGroups.TagGroupName = N'Doc Review Designation') AND (Tags.TagName IS NOT NULL) AND (Review.ReviewPass.ReviewPassName = N'%s')"% batchSetName)
|
| 294 |
|
|
toBeUpdatedCount = cursor.rowcount
|
| 295 |
|
|
cnxn.commit()
|
| 296 |
|
|
|
| 297 |
|
|
cnxn.close()
|
| 298 |
|
|
|
| 299 |
|
|
return toBeUpdatedCount
|
| 300 |
|
|
|
| 301 |
|
|
def GenerateSavedSearchesReport (self, pipeFile):
|
| 302 |
|
|
"""Generates a pipe delimited report from the saved searches"""
|
| 303 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 304 |
|
|
|
| 305 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 306 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 307 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 308 |
|
|
|
| 309 |
|
|
## First grab the parent folder matrix and prepopulate the -1 and 0 folders
|
| 310 |
|
|
folderMatrix = {}
|
| 311 |
|
|
folderMatrix[0] = ['<RootFolder>',0,0,'Nobody']
|
| 312 |
|
|
folderMatrix[-1] = ['<PrivateFolder>',0,0,'Nobody']
|
| 313 |
|
|
|
| 314 |
|
|
cursor.execute('SELECT SavedSearch.Folders.* FROM SavedSearch.Folders')
|
| 315 |
|
|
for row in cursor:
|
| 316 |
|
|
folderMatrix[getattr(row,'FolderId')] = [getattr(row,'FolderName'),getattr(row,'ParentFolderId'),getattr(row,'CreatedByUserName')]
|
| 317 |
|
|
|
| 318 |
|
|
|
| 319 |
|
|
## Now create the pipedelim file
|
| 320 |
|
|
outputFile = open(pipeFile,'w')
|
| 321 |
|
|
#outputFile.write(foo.encode('utf8'))
|
| 322 |
|
|
outputFile.write(u'UserName|ParentFolderId|SearchId|SearchName|CreatedDate|ModifiedDate|SearchItems\n'.encode('utf8'))
|
| 323 |
|
|
|
| 324 |
|
|
cursor.execute('SELECT ActivityTracking.DIM_User.UserName,SavedSearch.Searches.ParentFolderId,SavedSearch.Searches.SearchId, SavedSearch.Searches.SearchName, SavedSearch.Searches.CreatedDate,SavedSearch.Searches.ModifiedDate, SavedSearch.Searches.SearchItems FROM ActivityTracking.DIM_User INNER JOIN SavedSearch.Searches ON ActivityTracking.DIM_User.UserKey = SavedSearch.Searches.CreatedByKey AND ActivityTracking.DIM_User.UserKey = SavedSearch.Searches.ModifiedByKey AND ActivityTracking.DIM_User.UserKey = SavedSearch.Searches.CreatedByKey AND ActivityTracking.DIM_User.UserKey = SavedSearch.Searches.ModifiedByKey')
|
| 325 |
|
|
|
| 326 |
|
|
for row in cursor:
|
| 327 |
|
|
#outputFile.write("%s|%s|%s|%s|%s|%s|%s\n"% (getattr(row,'UserName'),folderMatrix[getattr(row,'ParentFolderId')][0],getattr(row,'SearchId'),getattr(row,'SearchName'),getattr(row,'CreatedDate'),getattr(row,'ModifiedDate',getattr(row,'SearchItems')))
|
| 328 |
|
|
outputFile.write(u"{0}|{1}|{2}|{3}|{4}|{5}|{6}\n".format(getattr(row,u'UserName'),folderMatrix[getattr(row,u'ParentFolderId')][0],getattr(row,u'SearchId'),getattr(row,u'SearchName'),getattr(row,u'CreatedDate'),getattr(row,u'ModifiedDate'),getattr(row,u'SearchItems')).encode('utf8'))
|
| 329 |
|
|
|
| 330 |
|
|
outputFile.close()
|
| 331 |
|
|
cnxn.close()
|
| 332 |
|
|
|
| 333 |
|
|
#return folderMatrix
|
| 334 |
|
|
|
| 335 |
|
|
|
| 336 |
|
|
|
| 337 |
|
|
def RunDatabaseDefrag (self, viewOnly = True):
|
| 338 |
|
|
"""Runs a database defrag on the current database. default is viewonly, which wont actually run it. overide by setting to false to run"""
|
| 339 |
|
|
internalCaseName = self.caseMatrix[self.currentDatabaseName]
|
| 340 |
|
|
|
| 341 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 342 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 343 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 344 |
|
|
|
| 345 |
|
|
## with the db selected, now execute the sp_updatestats part
|
| 346 |
|
|
print "\nPerforming SP_Updatestats..."
|
| 347 |
|
|
cursor.execute("sp_updatestats")
|
| 348 |
|
|
cnxn.close()
|
| 349 |
|
|
print "SP_UpdateStats finished."
|
| 350 |
|
|
|
| 351 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=evdxAdmin;""Trusted_Connection=yes;")
|
| 352 |
|
|
cnxn = self._GetNewCnxn('evdxAdmin')
|
| 353 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 354 |
|
|
|
| 355 |
|
|
|
| 356 |
|
|
## with the eevdxadmin db selected, now run the ap_rebuildindexes part
|
| 357 |
|
|
print "\nPerforming ap_rebuildIndexes..."
|
| 358 |
|
|
cursor.execute("exec ap_rebuildindexes @databasename = '%s'"%internalCaseName)
|
| 359 |
|
|
print "rebuildIndexes finished"
|
| 360 |
|
|
|
| 361 |
nino.borges |
671 |
#cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};""Server=XIPRORVW01\XIPRORVW01;""Database=%s;""Trusted_Connection=yes;"%internalCaseName)
|
| 362 |
|
|
cnxn = self._GetNewCnxn(internalCaseName)
|
| 363 |
nino.borges |
650 |
cursor = cnxn.cursor()
|
| 364 |
|
|
|
| 365 |
|
|
## Now read and run the rest by way of a blob.
|
| 366 |
|
|
print "\nNow performing full defrag..."
|
| 367 |
|
|
self._ProcessSQLBlob('defragBlob',cursor)
|
| 368 |
|
|
|
| 369 |
|
|
cnxn.close()
|
| 370 |
|
|
print "Full Defrag Finished."
|
| 371 |
|
|
|
| 372 |
|
|
|