Mirror of https://github.com/status-im/fathom.git, synced 2025-03-01 11:30:28 +00:00
Merge branch 'grouped-referrers'
commit ff136368c3
Makefile (7 lines changed)
@@ -4,7 +4,8 @@ LDFLAGS += -extldflags "-static"
 MAIN_PKG := ./cmd/fathom
 PACKAGES ?= $(shell go list ./... | grep -v /vendor/)
 JS_SOURCES ?= $(shell find assets/src/. -name "*.js" -type f)
-SOURCES ?= $(shell find . -name "*.go" -type f)
+GO_SOURCES ?= $(shell find . -name "*.go" -type f)
+SQL_SOURCES ?= $(shell find . -name "*.sql" -type f)
 ENV ?= $(shell export $(cat .env | xargs))
 GOPATH=$(shell go env GOPATH)

@@ -18,12 +19,12 @@ install: $(wildcard *.go) $(GOPATH)/bin/packr
 .PHONY: build
 build: $(EXECUTABLE)

-$(EXECUTABLE): $(SOURCES) assets/build $(GOPATH)/bin/packr
+$(EXECUTABLE): $(GO_SOURCES) assets/build
 	go build -o $@ $(MAIN_PKG)

 dist: assets/dist build/fathom-linux-amd64

-build/fathom-linux-amd64: $(GOPATH)/bin/packr
+build/fathom-linux-amd64: $(GOPATH)/bin/packr $(SQL_SOURCES) $(GO_SOURCES) $(JS_SOURCES)
 	GOOS=linux GOARCH=amd64 $(GOPATH)/bin/packr build -v -ldflags '-w $(LDFLAGS)' -o $@ $(MAIN_PKG)

 $(GOPATH)/bin/packr:
@@ -57,21 +57,25 @@ class Table extends Component {

   render(props, state) {
     const tableRows = state.records !== null && state.records.length > 0 ? state.records.map((p, i) => {
-      let ahref = document.createElement('a');
-      ahref.href = (p.Hostname + p.Pathname) || p.URL;
+
+      let href = (p.Hostname + p.Pathname) || p.URL;
       let classes = "table-row";
       if(state.total > 0) {
         classes += " w" + Math.min(98, Math.round(p.Pageviews / state.total * 100 * 2.5));
       }

-      let label = ahref.pathname + ahref.search;
+      let label = p.Pathname
       if( props.showHostname ) {
-        label = ahref.hostname.replace('www.', '') + (ahref.pathname.length > 1 ? ahref.pathname : '');
+        if( p.Group) {
+          label = p.Group
+        } else {
+          label = p.Hostname.replace('www.', '').replace('https://', '').replace('http://', '') + (p.Pathname.length > 1 ? p.Pathname : '')
+        }
       }

       return(
         <div class={classes}>
-          <div class="cell main-col"><a href={ahref.href}>{label}</a></div>
+          <div class="cell main-col"><a href={href}>{label}</a></div>
           <div class="cell">{p.Pageviews}</div>
           <div class="cell">{p.Visitors||"-"}</div>
         </div>

@@ -81,8 +85,7 @@ class Table extends Component {
       <div class={(state.loading ? "loading" : '')}>
         <div class="table-row header">
           {props.headers.map((header, i) => {
-            let classes = i === 0 ? 'main-col cell' : 'cell';
-            return (<div class={classes}>{header}</div>)
+            return (<div class={i === 0 ? 'main-col cell' : 'cell'}>{header}</div>)
           })}
         </div>
         <div>
@@ -105,6 +105,7 @@ function trackPageview() {
     cookies.set('_fathom', JSON.stringify(data), { expires: 60 * 60 * 24});
   });
   document.body.appendChild(i);
+  window.setTimeout(() => { document.body.removeChild(i)}, 1000);
 }

 // override global fathom object
@@ -3,6 +3,7 @@ package aggregator
 import (
 	"github.com/usefathom/fathom/pkg/datastore"
 	"github.com/usefathom/fathom/pkg/models"
+	"net/url"

 	log "github.com/sirupsen/logrus"
 )
@@ -19,17 +20,18 @@ func New(db datastore.Datastore) *aggregator {
 }

 // Run processes the pageviews which are ready to be processed and adds them to daily aggregation
-func (agg *aggregator) Run() {
+func (agg *aggregator) Run() int {
 	// Get unprocessed pageviews
 	pageviews, err := agg.database.GetProcessablePageviews()
 	if err != nil && err != datastore.ErrNoResults {
 		log.Error(err)
-		return
+		return 0
 	}

 	// Do we have anything to process?
-	if len(pageviews) == 0 {
-		return
+	n := len(pageviews)
+	if n == 0 {
+		return 0
 	}

 	results := agg.Process(pageviews)

@@ -61,6 +63,8 @@ func (agg *aggregator) Run() {
 	if err != nil {
 		log.Error(err)
 	}
+
+	return n
 }

 // Process processes the given pageviews and returns the (aggregated) results per metric per day
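Note: Run now reports how many pageviews it processed instead of returning nothing, so a caller can tell whether a pass did any work. A minimal sketch of how that return value could be used (hypothetical caller, not part of this commit; in the real application the db value would come from the sqlstore package):

package main

import (
	"github.com/usefathom/fathom/pkg/aggregator"
	"github.com/usefathom/fathom/pkg/datastore"
)

// drain keeps aggregating until a pass processes zero pageviews. It relies
// only on the new Run() int signature; the loop itself is hypothetical.
func drain(db datastore.Datastore) {
	agg := aggregator.New(db)
	for agg.Run() > 0 {
	}
}

func main() {
	// Wiring up a real datastore is out of scope for this sketch, so it
	// leaves db nil and exits early.
	var db datastore.Datastore
	if db == nil {
		return
	}
	drain(db)
}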
@@ -69,87 +73,34 @@ func (agg *aggregator) Process(pageviews []*models.Pageview) *results {
 	results := newResults()

 	for _, p := range pageviews {
-		site, err := agg.getSiteStats(results, p.Timestamp)
+		err := agg.handleSiteview(results, p)
 		if err != nil {
 			log.Error(err)
 			continue
 		}

-		site.Pageviews += 1
-
-		if p.Duration > 0.00 {
-			site.KnownDurations += 1
-			site.AvgDuration = site.AvgDuration + ((float64(p.Duration) - site.AvgDuration) * 1 / float64(site.KnownDurations))
-		}
-
-		if p.IsNewVisitor {
-			site.Visitors += 1
-		}
-
-		if p.IsNewSession {
-			site.Sessions += 1
-
-			if p.IsBounce {
-				site.BounceRate = ((float64(site.Sessions-1) * site.BounceRate) + 1) / (float64(site.Sessions))
-			} else {
-				site.BounceRate = ((float64(site.Sessions-1) * site.BounceRate) + 0) / (float64(site.Sessions))
-			}
-		}
-
-		pageStats, err := agg.getPageStats(results, p.Timestamp, p.Hostname, p.Pathname)
+		err = agg.handlePageview(results, p)
 		if err != nil {
 			log.Error(err)
 			continue
 		}

-		pageStats.Pageviews += 1
-		if p.IsUnique {
-			pageStats.Visitors += 1
-		}
-
-		if p.Duration > 0.00 {
-			pageStats.KnownDurations += 1
-			pageStats.AvgDuration = pageStats.AvgDuration + ((float64(p.Duration) - pageStats.AvgDuration) * 1 / float64(pageStats.KnownDurations))
-		}
-
-		if p.IsNewSession {
-			pageStats.Entries += 1
-
-			if p.IsBounce {
-				pageStats.BounceRate = ((float64(pageStats.Entries-1) * pageStats.BounceRate) + 1.00) / (float64(pageStats.Entries))
-			} else {
-				pageStats.BounceRate = ((float64(pageStats.Entries-1) * pageStats.BounceRate) + 0.00) / (float64(pageStats.Entries))
-			}
-		}
-
 		// referrer stats
 		if p.Referrer != "" {
-			referrerStats, err := agg.getReferrerStats(results, p.Timestamp, p.Referrer)
+			err := agg.handleReferral(results, p)
 			if err != nil {
 				log.Error(err)
 				continue
 			}
-
-			referrerStats.Pageviews += 1
-
-			if p.IsNewVisitor {
-				referrerStats.Visitors += 1
-			}
-
-			if p.IsBounce {
-				referrerStats.BounceRate = ((float64(referrerStats.Pageviews-1) * referrerStats.BounceRate) + 1.00) / (float64(referrerStats.Pageviews))
-			} else {
-				referrerStats.BounceRate = ((float64(referrerStats.Pageviews-1) * referrerStats.BounceRate) + 0.00) / (float64(referrerStats.Pageviews))
-			}
-
-			if p.Duration > 0.00 {
-				referrerStats.KnownDurations += 1
-				referrerStats.AvgDuration = referrerStats.AvgDuration + ((float64(p.Duration) - referrerStats.AvgDuration) * 1 / float64(referrerStats.KnownDurations))
-			}
-
 		}
-
 	}

 	return results
 }

+func parseUrlParts(s string) (string, string, error) {
+	u, err := url.Parse(s)
+	if err != nil {
+		return "", "", err
+	}
+
+	return u.Scheme + "://" + u.Host, u.Path, nil
+}
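parseUrlParts is what turns a raw referrer URL into the hostname/pathname pair used by the new referrer handling. A standalone sketch of the same splitting logic, with an assumed sample URL (not from the commit), showing that the query string is dropped:

package main

import (
	"fmt"
	"net/url"
)

// Same splitting as parseUrlParts in the diff above: keep scheme://host as
// the hostname and the path as the pathname; query strings are discarded.
func parseUrlParts(s string) (string, string, error) {
	u, err := url.Parse(s)
	if err != nil {
		return "", "", err
	}
	return u.Scheme + "://" + u.Host, u.Path, nil
}

func main() {
	hostname, pathname, err := parseUrlParts("https://www.google.com/search?q=fathom")
	if err != nil {
		panic(err)
	}
	fmt.Println(hostname) // https://www.google.com
	fmt.Println(pathname) // /search
}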
pkg/aggregator/pageviews.go (new file, 36 lines)
@@ -0,0 +1,36 @@
package aggregator

import (
	log "github.com/sirupsen/logrus"
	"github.com/usefathom/fathom/pkg/models"
)

func (agg *aggregator) handlePageview(results *results, p *models.Pageview) error {
	pageStats, err := agg.getPageStats(results, p.Timestamp, p.Hostname, p.Pathname)
	if err != nil {
		log.Error(err)
		return err
	}

	pageStats.Pageviews += 1
	if p.IsUnique {
		pageStats.Visitors += 1
	}

	if p.Duration > 0.00 {
		pageStats.KnownDurations += 1
		pageStats.AvgDuration = pageStats.AvgDuration + ((float64(p.Duration) - pageStats.AvgDuration) * 1 / float64(pageStats.KnownDurations))
	}

	if p.IsNewSession {
		pageStats.Entries += 1

		if p.IsBounce {
			pageStats.BounceRate = ((float64(pageStats.Entries-1) * pageStats.BounceRate) + 1.00) / (float64(pageStats.Entries))
		} else {
			pageStats.BounceRate = ((float64(pageStats.Entries-1) * pageStats.BounceRate) + 0.00) / (float64(pageStats.Entries))
		}
	}

	return nil
}
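The AvgDuration line above is a running mean: each known duration moves the stored average by (x - avg) / n, so the aggregator never has to keep the raw durations around. A standalone check of that formula against a plain batch mean (the sample durations are made up):

package main

import "fmt"

func main() {
	durations := []float64{10, 20, 60, 30}

	// Incremental mean, same shape as the AvgDuration update in the diff above.
	avg, known := 0.0, 0
	for _, d := range durations {
		known++
		avg = avg + (d-avg)/float64(known)
	}

	// Batch mean for comparison.
	sum := 0.0
	for _, d := range durations {
		sum += d
	}

	fmt.Println(avg, sum/float64(len(durations))) // both print 30
}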
pkg/aggregator/referrers.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package aggregator

import (
	log "github.com/sirupsen/logrus"
	"github.com/usefathom/fathom/pkg/models"
)

func (agg *aggregator) handleReferral(results *results, p *models.Pageview) error {
	hostname, pathname, err := parseUrlParts(p.Referrer)
	if err != nil {
		return err
	}

	referrerStats, err := agg.getReferrerStats(results, p.Timestamp, hostname, pathname)
	if err != nil {
		log.Error(err)
		return err
	}

	referrerStats.Pageviews += 1

	if p.IsNewVisitor {
		referrerStats.Visitors += 1
	}

	if p.IsBounce {
		referrerStats.BounceRate = ((float64(referrerStats.Pageviews-1) * referrerStats.BounceRate) + 1.00) / (float64(referrerStats.Pageviews))
	} else {
		referrerStats.BounceRate = ((float64(referrerStats.Pageviews-1) * referrerStats.BounceRate) + 0.00) / (float64(referrerStats.Pageviews))
	}

	if p.Duration > 0.00 {
		referrerStats.KnownDurations += 1
		referrerStats.AvgDuration = referrerStats.AvgDuration + ((float64(p.Duration) - referrerStats.AvgDuration) * 1 / float64(referrerStats.KnownDurations))
	}

	return nil
}
pkg/aggregator/siteviews.go (new file, 37 lines)
@@ -0,0 +1,37 @@
package aggregator

import (
	log "github.com/sirupsen/logrus"
	"github.com/usefathom/fathom/pkg/models"
)

func (agg *aggregator) handleSiteview(results *results, p *models.Pageview) error {
	site, err := agg.getSiteStats(results, p.Timestamp)
	if err != nil {
		log.Error(err)
		return err
	}

	site.Pageviews += 1

	if p.Duration > 0.00 {
		site.KnownDurations += 1
		site.AvgDuration = site.AvgDuration + ((float64(p.Duration) - site.AvgDuration) * 1 / float64(site.KnownDurations))
	}

	if p.IsNewVisitor {
		site.Visitors += 1
	}

	if p.IsNewSession {
		site.Sessions += 1

		if p.IsBounce {
			site.BounceRate = ((float64(site.Sessions-1) * site.BounceRate) + 1) / (float64(site.Sessions))
		} else {
			site.BounceRate = ((float64(site.Sessions-1) * site.BounceRate) + 0) / (float64(site.Sessions))
		}
	}

	return nil
}
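BounceRate is maintained the same way: a running proportion that re-weights the previous rate by the old session count and then adds 1 or 0 for the new session. A small standalone check with made-up session outcomes:

package main

import "fmt"

func main() {
	bounces := []bool{true, false, true, true} // 3 of 4 sessions bounce

	rate, sessions := 0.0, 0
	for _, bounced := range bounces {
		sessions++
		hit := 0.0
		if bounced {
			hit = 1.0
		}
		// Same shape as the diff: ((n-1)*rate + hit) / n
		rate = (float64(sessions-1)*rate + hit) / float64(sessions)
	}

	fmt.Println(rate) // 0.75
}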
@@ -1,6 +1,7 @@
 package aggregator

 import (
+	"strings"
 	"time"

 	"github.com/usefathom/fathom/pkg/datastore"

@@ -64,14 +65,14 @@ func (agg *aggregator) getPageStats(r *results, t time.Time, hostname string, pa
 	return stats, nil
 }

-func (agg *aggregator) getReferrerStats(r *results, t time.Time, url string) (*models.ReferrerStats, error) {
+func (agg *aggregator) getReferrerStats(r *results, t time.Time, hostname string, pathname string) (*models.ReferrerStats, error) {
 	date := t.Format("2006-01-02")
-	if stats, ok := r.Referrers[date+url]; ok {
+	if stats, ok := r.Referrers[date+hostname+pathname]; ok {
 		return stats, nil
 	}

 	// get from db
-	stats, err := agg.database.GetReferrerStats(t, url)
+	stats, err := agg.database.GetReferrerStats(t, hostname, pathname)
 	if err != nil && err != datastore.ErrNoResults {
 		return nil, err
 	}

@@ -79,15 +80,23 @@ func (agg *aggregator) getReferrerStats(r *results, t time.Time, url string) (*m
 	// create in db
 	if stats == nil {
 		stats = &models.ReferrerStats{
-			URL:      url,
+			Hostname: hostname,
+			Pathname: pathname,
 			Date:     t,
+			Group:    "",
 		}
+
+		// TODO: Abstract this
+		if strings.Contains(stats.Hostname, "www.google.") {
+			stats.Group = "Google"
+		}
+
 		err = agg.database.InsertReferrerStats(stats)
 		if err != nil {
 			return nil, err
 		}
 	}

-	r.Referrers[date+url] = stats
+	r.Referrers[date+hostname+pathname] = stats
 	return stats, nil
 }
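The hard-coded www.google. check above is flagged with "TODO: Abstract this". One possible abstraction, purely as a sketch and not part of this commit, is a lookup table from hostname fragments to group names:

package main

import (
	"fmt"
	"strings"
)

// Hypothetical follow-up to the TODO: map hostname fragments to display
// groups instead of hard-coding Google inside getReferrerStats.
var referrerGroups = map[string]string{
	"www.google.": "Google",
}

func groupForHostname(hostname string) string {
	for fragment, group := range referrerGroups {
		if strings.Contains(hostname, fragment) {
			return group
		}
	}
	return "" // no group: aggregation falls back to hostname + pathname
}

func main() {
	fmt.Println(groupForHostname("https://www.google.co.uk"))     // Google
	fmt.Println(groupForHostname("https://news.ycombinator.com")) // (empty)
}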
@@ -31,6 +31,7 @@ func parsePathname(p string) string {
 	return "/" + strings.TrimLeft(p, "/")
 }

+// TODO: Move this to aggregator, as we need this endpoint to be as fast as possible
 func parseReferrer(r string) string {
 	u, err := url.Parse(r)
 	if err != nil {
@@ -43,7 +43,7 @@ type Datastore interface {
 	GetAggregatedPageStatsPageviews(time.Time, time.Time) (int, error)

 	// referrer stats
-	GetReferrerStats(time.Time, string) (*models.ReferrerStats, error)
+	GetReferrerStats(time.Time, string, string) (*models.ReferrerStats, error)
 	InsertReferrerStats(*models.ReferrerStats) error
 	UpdateReferrerStats(*models.ReferrerStats) error
 	GetAggregatedReferrerStats(time.Time, time.Time, int) ([]*models.ReferrerStats, error)
@@ -0,0 +1,19 @@
-- +migrate Up

ALTER TABLE daily_referrer_stats ADD COLUMN groupname VARCHAR(255) NULL;
ALTER TABLE daily_referrer_stats ADD COLUMN hostname VARCHAR(255);
ALTER TABLE daily_referrer_stats ADD COLUMN pathname VARCHAR(255);

UPDATE daily_referrer_stats SET hostname = SUBSTRING_INDEX( url, "/", 3) WHERE url != "" AND hostname = "";
UPDATE daily_referrer_stats SET pathname = CONCAT("/", SUBSTRING_INDEX( url, "/", -1)) WHERE url != "" AND pathname = "";

ALTER TABLE daily_referrer_stats DROP COLUMN url;

-- +migrate Down

ALTER TABLE daily_referrer_stats DROP COLUMN groupname;
ALTER TABLE daily_referrer_stats DROP COLUMN hostname;
ALTER TABLE daily_referrer_stats DROP COLUMN pathname;

ALTER TABLE daily_referrer_stats ADD COLUMN url VARCHAR(255) NOT NULL;
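For reference, the two UPDATE statements in this MySQL migration split the old url column roughly the way parseUrlParts does in Go: everything before the third "/" becomes hostname, and "/" plus the segment after the last "/" becomes pathname (so only the final segment survives for multi-segment paths). A standalone mirror of that split, with an assumed sample URL:

package main

import (
	"fmt"
	"strings"
)

// hostnameOf mirrors SUBSTRING_INDEX(url, "/", 3): keep everything before the
// third "/". Assumes the stored URLs have the usual scheme://host/... shape.
func hostnameOf(url string) string {
	parts := strings.SplitN(url, "/", 4)
	return strings.Join(parts[:3], "/")
}

// pathnameOf mirrors CONCAT("/", SUBSTRING_INDEX(url, "/", -1)): "/" plus the
// segment after the last "/".
func pathnameOf(url string) string {
	parts := strings.Split(url, "/")
	return "/" + parts[len(parts)-1]
}

func main() {
	url := "https://www.google.com/search" // assumed sample row
	fmt.Println(hostnameOf(url))           // https://www.google.com
	fmt.Println(pathnameOf(url))           // /search
}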
@@ -0,0 +1,18 @@
-- +migrate Up

ALTER TABLE daily_referrer_stats ADD COLUMN groupname VARCHAR(255) NULL;
ALTER TABLE daily_referrer_stats ADD COLUMN hostname VARCHAR(255);
ALTER TABLE daily_referrer_stats ADD COLUMN pathname VARCHAR(255);

UPDATE daily_referrer_stats SET hostname = CONCAT( SPLIT_PART(url, '://', 1), '://', SPLIT_PART(SPLIT_PART(url, '://', 2), '/', 1) ) WHERE url != '' AND hostname = '';
UPDATE daily_referrer_stats SET pathname = SPLIT_PART( url, hostname, 2 ) WHERE url != '' AND pathname = '';

ALTER TABLE daily_referrer_stats DROP COLUMN url;

-- +migrate Down

ALTER TABLE daily_referrer_stats DROP COLUMN groupname;
ALTER TABLE daily_referrer_stats DROP COLUMN hostname;
ALTER TABLE daily_referrer_stats DROP COLUMN pathname;

ALTER TABLE daily_referrer_stats ADD COLUMN url VARCHAR(255) NOT NULL;
@@ -0,0 +1,27 @@
-- +migrate Up

ALTER TABLE daily_referrer_stats ADD COLUMN groupname VARCHAR(255) NULL;
ALTER TABLE daily_referrer_stats ADD COLUMN hostname VARCHAR(255);
ALTER TABLE daily_referrer_stats ADD COLUMN pathname VARCHAR(255);

UPDATE daily_referrer_stats SET hostname = SUBSTR(url, 0, (INSTR(url, '://')+3+INSTR(SUBSTR(url, INSTR(url, '://')+3), '/')-1)) WHERE url != '' AND (hostname = '' OR hostname IS NULL);
UPDATE daily_referrer_stats SET pathname = SUBSTR(url, LENGTH(hostname)+1) WHERE url != '' AND (pathname = '' OR pathname IS NULL);

-- drop `url` column... oh sqlite
ALTER TABLE daily_referrer_stats RENAME TO daily_referrer_stats_old;
CREATE TABLE daily_referrer_stats(
	hostname VARCHAR(255) NOT NULL,
	pathname VARCHAR(255) NOT NULL,
	groupname VARCHAR(255) NULL,
	pageviews INTEGER NOT NULL,
	visitors INTEGER NOT NULL,
	bounce_rate FLOAT NOT NULL,
	avg_duration FLOAT NOT NULL,
	known_durations INTEGER NOT NULL DEFAULT 0,
	date DATE NOT NULL
);
INSERT INTO daily_referrer_stats SELECT hostname, pathname, groupname, pageviews, visitors, bounce_rate, avg_duration, known_durations, date FROM daily_referrer_stats_old;

-- +migrate Down

-- TODO....
@@ -7,10 +7,10 @@ import (
 	"github.com/usefathom/fathom/pkg/models"
 )

-func (db *sqlstore) GetReferrerStats(date time.Time, url string) (*models.ReferrerStats, error) {
+func (db *sqlstore) GetReferrerStats(date time.Time, hostname string, pathname string) (*models.ReferrerStats, error) {
 	stats := &models.ReferrerStats{}
-	query := db.Rebind(`SELECT * FROM daily_referrer_stats WHERE url = ? AND date = ? LIMIT 1`)
-	err := db.Get(stats, query, url, date.Format("2006-01-02"))
+	query := db.Rebind(`SELECT * FROM daily_referrer_stats WHERE date = ? AND hostname = ? AND pathname = ? LIMIT 1`)
+	err := db.Get(stats, query, date.Format("2006-01-02"), hostname, pathname)
 	if err != nil && err == sql.ErrNoRows {
 		return nil, ErrNoResults
 	}

@@ -18,20 +18,32 @@ func (db *sqlstore) GetReferrerStats(date time.Time, url string) (*models.Referr
 }

 func (db *sqlstore) InsertReferrerStats(s *models.ReferrerStats) error {
-	query := db.Rebind(`INSERT INTO daily_referrer_stats(visitors, pageviews, bounce_rate, avg_duration, known_durations, url, date) VALUES(?, ?, ?, ?, ?, ?, ?)`)
-	_, err := db.Exec(query, s.Visitors, s.Pageviews, s.BounceRate, s.AvgDuration, s.KnownDurations, s.URL, s.Date.Format("2006-01-02"))
+	query := db.Rebind(`INSERT INTO daily_referrer_stats(visitors, pageviews, bounce_rate, avg_duration, known_durations, groupname, hostname, pathname, date) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)`)
+	_, err := db.Exec(query, s.Visitors, s.Pageviews, s.BounceRate, s.AvgDuration, s.KnownDurations, s.Group, s.Hostname, s.Pathname, s.Date.Format("2006-01-02"))
 	return err
 }

 func (db *sqlstore) UpdateReferrerStats(s *models.ReferrerStats) error {
-	query := db.Rebind(`UPDATE daily_referrer_stats SET visitors = ?, pageviews = ?, bounce_rate = ROUND(?, 4), avg_duration = ROUND(?, 4), known_durations = ? WHERE url = ? AND date = ?`)
-	_, err := db.Exec(query, s.Visitors, s.Pageviews, s.BounceRate, s.AvgDuration, s.KnownDurations, s.URL, s.Date.Format("2006-01-02"))
+	query := db.Rebind(`UPDATE daily_referrer_stats SET visitors = ?, pageviews = ?, bounce_rate = ROUND(?, 4), avg_duration = ROUND(?, 4), known_durations = ?, groupname = ? WHERE hostname = ? AND pathname = ? AND date = ?`)
+	_, err := db.Exec(query, s.Visitors, s.Pageviews, s.BounceRate, s.AvgDuration, s.KnownDurations, s.Group, s.Hostname, s.Pathname, s.Date.Format("2006-01-02"))
 	return err
 }

 func (db *sqlstore) GetAggregatedReferrerStats(startDate time.Time, endDate time.Time, limit int) ([]*models.ReferrerStats, error) {
 	var result []*models.ReferrerStats
-	query := db.Rebind(`SELECT url, SUM(visitors) AS visitors, SUM(pageviews) AS pageviews, COALESCE(ROUND(SUM(pageviews*bounce_rate)/SUM(pageviews), 4), 0.00) AS bounce_rate, COALESCE(ROUND(SUM(avg_duration*pageviews)/SUM(pageviews), 4), 0.00) AS avg_duration FROM daily_referrer_stats WHERE date >= ? AND date <= ? GROUP BY url ORDER BY pageviews DESC LIMIT ?`)
+	query := db.Rebind(`
+		SELECT
+			MIN(hostname) AS hostname,
+			MIN(pathname) AS pathname,
+			MIN(COALESCE(groupname, "")) AS groupname,
+			SUM(visitors) AS visitors,
+			SUM(pageviews) AS pageviews,
+			COALESCE(ROUND(SUM(pageviews*bounce_rate)/SUM(pageviews), 4), 0.00) AS bounce_rate,
+			COALESCE(ROUND(SUM(avg_duration*pageviews)/SUM(pageviews), 4), 0.00) AS avg_duration
+		FROM daily_referrer_stats
+		WHERE date >= ? AND date <= ?
+		GROUP BY COALESCE(NULLIF(groupname, ""), hostname || pathname) ORDER BY pageviews DESC LIMIT ?`)
+
 	err := db.Select(&result, query, startDate.Format("2006-01-02"), endDate.Format("2006-01-02"), limit)
 	return result, err
 }
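After this change a row from GetAggregatedReferrerStats carries either a Group name or a Hostname/Pathname pair. A hypothetical consumer (not part of the commit) that labels rows the same way the dashboard Table component above does:

package main

import (
	"fmt"
	"strings"
	"time"

	"github.com/usefathom/fathom/pkg/datastore"
)

// printTopReferrers is a hypothetical consumer of the updated Datastore
// interface from this diff; it is not part of the commit.
func printTopReferrers(db datastore.Datastore) error {
	end := time.Now()
	start := end.AddDate(0, 0, -7)

	referrers, err := db.GetAggregatedReferrerStats(start, end, 10)
	if err != nil {
		return err
	}

	for _, r := range referrers {
		label := r.Group
		if label == "" {
			// Same fallback the dashboard uses: hostname (minus www.) plus pathname.
			label = strings.Replace(r.Hostname, "www.", "", 1) + r.Pathname
		}
		fmt.Printf("%s: %d pageviews, %d visitors\n", label, r.Pageviews, r.Visitors)
	}

	return nil
}

func main() {
	// Wiring up a real datastore is out of scope here; any implementation of
	// datastore.Datastore (e.g. the sqlstore package) would do.
	var db datastore.Datastore
	if db == nil {
		return
	}
	_ = printTopReferrers(db)
}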
@@ -29,6 +29,12 @@ func New(c *Config) *sqlstore {
 	// write log statement
 	log.Infof("Connected to %s database: %s", c.Driver, c.Name)

+	// Driver specific database options
+	if c.Driver == "mysql" {
+		// Because SQLite doesn't have CONCAT, tell MySQL to accept pipes for concatenating string columns
+		db.Exec("SET sql_mode=PIPES_AS_CONCAT;")
+	}
+
 	// run migrations
 	db.Migrate()
@@ -5,7 +5,9 @@ import (
 )

 type ReferrerStats struct {
-	URL        string  `db:"url"`
+	Hostname   string  `db:"hostname"`
+	Pathname   string  `db:"pathname"`
+	Group      string  `db:"groupname"`
 	Visitors   int64   `db:"visitors"`
 	Pageviews  int64   `db:"pageviews"`
 	BounceRate float64 `db:"bounce_rate"`