Merge pull request #74 from Aquilao/master

add parse Firefox download history
ᴍᴏᴏɴD4ʀᴋ authored 4 years ago, committed by GitHub
commit 22474aaec4
2 changed files:
  1. core/browser.go (6 lines changed)
  2. core/data/parse.go (61 lines changed)

core/browser.go

@@ -56,7 +56,7 @@ const (
 )
 
 var (
-	errItemNotSupported    = errors.New(`item not supported, default is "all", choose from history|password|bookmark|cookie`)
+	errItemNotSupported    = errors.New(`item not supported, default is "all", choose from history|download|password|bookmark|cookie`)
 	errBrowserNotSupported = errors.New("browser not supported")
 	errChromeSecretIsEmpty = errors.New("chrome secret is empty")
 	errDbusSecretIsEmpty   = errors.New("dbus secret key is empty")
@@ -109,6 +109,10 @@ var (
 			mainFile: data.FirefoxDataFile,
 			newItem:  data.NewHistoryData,
 		},
+		download: {
+			mainFile: data.FirefoxDataFile,
+			newItem:  data.NewDownloads,
+		},
 		password: {
 			mainFile: data.FirefoxKey4File,
 			subFile:  data.FirefoxLoginFile,
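The new download entry above wires Firefox download export into the existing item table: it reads the same places.sqlite main file (data.FirefoxDataFile) as history and constructs its parser with data.NewDownloads, and the errItemNotSupported message now lists download as a valid item. A rough, self-contained sketch of this item-table pattern, with every identifier below invented for illustration (they are not the repository's actual names):

// Hypothetical sketch of an item table keyed by item name; all names here
// are illustrative and do not mirror browser.go exactly.
package main

import (
	"errors"
	"fmt"
)

type firefoxItem struct {
	mainFile string        // sqlite file the item is read from, e.g. places.sqlite
	newItem  func() string // stand-in for a constructor such as data.NewDownloads
}

var firefoxItems = map[string]firefoxItem{
	"history":  {mainFile: "places.sqlite", newItem: func() string { return "history parser" }},
	"download": {mainFile: "places.sqlite", newItem: func() string { return "download parser" }},
}

func main() {
	entry, ok := firefoxItems["download"]
	if !ok {
		fmt.Println(errors.New("item not supported"))
		return
	}
	fmt.Println(entry.newItem(), "reads", entry.mainFile)
}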

core/data/parse.go

@@ -8,6 +8,7 @@ import (
 	"os"
 	"path/filepath"
 	"sort"
+	"strings"
 	"time"
 
 	"hack-browser-data/core/decrypt"
@@ -55,6 +56,7 @@ var (
 	queryChromiumDownload = `SELECT target_path, tab_url, total_bytes, start_time, end_time, mime_type FROM downloads`
 	queryChromiumCookie   = `SELECT name, encrypted_value, host_key, path, creation_utc, expires_utc, is_secure, is_httponly, has_expires, is_persistent FROM cookies`
 	queryFirefoxHistory   = `SELECT id, url, last_visit_date, title, visit_count FROM moz_places`
+	queryFirefoxDownload  = `SELECT place_id, GROUP_CONCAT(content), url, dateAdded FROM (SELECT * FROM moz_annos INNER JOIN moz_places ON moz_annos.place_id=moz_places.id) t GROUP BY place_id`
 	queryFirefoxBookMarks = `SELECT id, fk, type, dateAdded, title FROM moz_bookmarks`
 	queryFirefoxCookie    = `SELECT name, value, host, path, creationTime, expiry, isSecure, isHttpOnly FROM moz_cookies`
 	queryMetaData         = `SELECT item1, item2 FROM metaData WHERE id = 'password'`
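For each place_id, queryFirefoxDownload collapses that download's moz_annos rows with GROUP_CONCAT, so the second scanned column is the annotation contents joined by commas, typically the destination path (or file:// URI) followed by the downloads metaData JSON. A made-up example of one such value might look roughly like

file:///C:/Users/me/Downloads/setup.exe,{"state":1,"deleted":false,"endTime":1601377781274,"fileSize":2097152}

which is why FirefoxParse below splits the string on ",{" to separate the target path from the JSON half.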
@@ -489,7 +491,7 @@ func (d *downloads) ChromeParse(key []byte) error {
 			TotalBytes: totalBytes,
 			StartTime:  utils.TimeEpochFormat(startTime),
 			EndTime:    utils.TimeEpochFormat(endTime),
-			MimiType:   mimeType,
+			MimeType:   mimeType,
 		}
 		if err != nil {
 			log.Error(err)
@@ -500,6 +502,61 @@ func (d *downloads) ChromeParse(key []byte) error {
 }
+func (d *downloads) FirefoxParse() error {
+	var (
+		err          error
+		keyDB        *sql.DB
+		downloadRows *sql.Rows
+		tempMap      map[int64]string
+	)
+	tempMap = make(map[int64]string)
+	keyDB, err = sql.Open("sqlite3", FirefoxDataFile)
+	if err != nil {
+		return err
+	}
+	_, err = keyDB.Exec(closeJournalMode)
+	if err != nil {
+		log.Error(err)
+	}
+	defer func() {
+		if err := keyDB.Close(); err != nil {
+			log.Error(err)
+		}
+	}()
+	downloadRows, err = keyDB.Query(queryFirefoxDownload)
+	if err != nil {
+		log.Error(err)
+		return err
+	}
+	defer func() {
+		if err := downloadRows.Close(); err != nil {
+			log.Error(err)
+		}
+	}()
+	for downloadRows.Next() {
+		var (
+			content, url       string
+			placeID, dateAdded int64
+		)
+		err = downloadRows.Scan(&placeID, &content, &url, &dateAdded)
+		if err != nil {
+			log.Warn(err)
+		}
+		contentList := strings.Split(content, ",{")
+		if len(contentList) > 1 {
+			path := contentList[0]
+			json := "{" + contentList[1]
+			endTime := gjson.Get(json, "endTime")
+			fileSize := gjson.Get(json, "fileSize")
+			d.downloads = append(d.downloads, download{
+				TargetPath: path,
+				Url:        url,
+				TotalBytes: fileSize.Int(),
+				StartTime:  utils.TimeStampFormat(dateAdded / 1000000),
+				EndTime:    utils.TimeStampFormat(endTime.Int() / 1000),
+			})
+		}
+		tempMap[placeID] = url
+	}
+	return nil
+}
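As a standalone illustration (not part of the diff) of the parsing above: strings.Split separates the target path from the metaData JSON, gjson.Get pulls endTime and fileSize out of that JSON, and the divisors reflect units, since the dateAdded column holds microseconds since the epoch (hence the division by 1,000,000) while the metaData endTime holds milliseconds (hence the division by 1,000). A minimal sketch with an invented content value:

// Minimal, self-contained sketch of the split and gjson lookups used in
// FirefoxParse; the content value below is invented for illustration.
package main

import (
	"fmt"
	"strings"

	"github.com/tidwall/gjson"
)

func main() {
	content := `file:///C:/Users/me/Downloads/setup.exe,{"state":1,"endTime":1601377781274,"fileSize":2097152}`
	parts := strings.Split(content, ",{")
	path, meta := parts[0], "{"+parts[1]

	fmt.Println(path)                                    // download target path
	fmt.Println(gjson.Get(meta, "fileSize").Int())       // total bytes: 2097152
	fmt.Println(gjson.Get(meta, "endTime").Int() / 1000) // end time in seconds (metaData stores milliseconds)
}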
@@ -897,7 +954,7 @@ type (
 		TotalBytes int64
 		StartTime  time.Time
 		EndTime    time.Time
-		MimiType   string
+		MimeType   string
 	}
 	card struct {
 		GUID string
