get general data flow up & running again for site stats

Danny 2018-05-07 16:05:53 +02:00
parent 082073afb8
commit 433c6e2f4d
17 changed files with 294 additions and 225 deletions


@@ -47,7 +47,7 @@ class CountWidget extends Component {
let before = this.state.before;
let after = this.state.after;
Client.request(`${this.props.endpoint}/count?before=${before}&after=${after}`)
Client.request(`${this.props.endpoint}?before=${before}&after=${after}`)
.then((d) => {
// request finished; check if the timestamp range is still the one the user wants to see
if( this.state.before != before || this.state.after != after ) {


@@ -25,7 +25,7 @@ class Realtime extends Component {
@bind
fetchData() {
Client.request(`visitors/count/realtime`)
Client.request(`stats/site/realtime`)
.then((d) => { this.setState({ count: d })})
}


@@ -48,10 +48,10 @@ class Dashboard extends Component {
<div class="boxes">
<div class="box box-totals animated fadeInUp delayed_03s">
<CountWidget title="Unique visitors" endpoint="visitors" before={state.before} after={state.after} />
<CountWidget title="Page views" endpoint="pageviews" before={state.before} after={state.after} />
<CountWidget title="Avg time on site" endpoint="time-on-site" format="duration" before={state.before} after={state.after} />
<CountWidget title="Bounce rate" endpoint="bounces" format="percentage" before={state.before} after={state.after} />
<CountWidget title="Unique visitors" endpoint="stats/site/visitors" before={state.before} after={state.after} />
<CountWidget title="Page views" endpoint="stats/site/pageviews" before={state.before} after={state.after} />
<CountWidget title="Avg time on site" endpoint="stats/site/duration" format="duration" before={state.before} after={state.after} />
<CountWidget title="Bounce rate" endpoint="stats/site/bounces" format="percentage" before={state.before} after={state.after} />
</div>
<Table endpoint="pageviews" headers={["Top pages", "Views", "Uniques"]} before={state.before} after={state.after} />


@@ -25,7 +25,7 @@ function generateKey() {
}
function getData() {
var data = Cookies.get('_fathom');
var data = cookies.get('_fathom');
if(data) {
try{
@@ -83,7 +83,7 @@ function trackPageview() {
r: referrer,
scheme: location.protocol.substring(0, location.protocol.length - 1),
u: data.pagesViewed.indexOf(path) == -1 ? 1 : 0,
b: data.isNew ? 1 : 0, // because only new visitors can bounce. we update this server-side.
b: 1, // TODO: if last pageview is less than 30 mins ago, we can already say this is not a bounce.
n: data.isNew ? 1 : 0,
};
@@ -92,7 +92,7 @@ function trackPageview() {
i.addEventListener('load', function() {
data.pagesViewed.push(path);
data.isNew = false;
Cookies.set('_fathom', JSON.stringify(data), { expires: 60 * 60 * 24});
cookies.set('_fathom', JSON.stringify(data), { expires: 60 * 60 * 24});
});
document.body.appendChild(i);
}


@@ -6,46 +6,14 @@ import (
"time"
"github.com/mssola/user_agent"
"github.com/usefathom/fathom/pkg/counter"
"github.com/usefathom/fathom/pkg/datastore"
"github.com/usefathom/fathom/pkg/models"
log "github.com/sirupsen/logrus"
)
var buffer []*models.RawPageview
var bufferSize = 50
var timeout = 200 * time.Millisecond
func persistPageviews() {
if len(buffer) > 0 {
err := datastore.SaveRawPageviews(buffer)
if err != nil {
log.Errorf("error saving pageviews: %s", err)
}
// clear buffer regardless of error... this means data loss, but better than filling the buffer for now
buffer = buffer[:0]
}
}
func processBuffer(pv chan *models.RawPageview) {
for {
select {
case pageview := <-pv:
buffer = append(buffer, pageview)
if len(buffer) >= bufferSize {
persistPageviews()
}
case <-time.After(timeout):
persistPageviews()
}
}
}
/* middleware */
func NewCollectHandler() http.Handler {
pageviews := make(chan *models.RawPageview, bufferSize)
go processBuffer(pageviews)
go aggregate()
return HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
// abort if this is a bot.
@@ -59,7 +27,7 @@ func NewCollectHandler() http.Handler {
now := time.Now()
// get pageview details
pageview := &models.RawPageview{
pageview := &models.Pageview{
SessionID: q.Get("sid"),
Pathname: q.Get("p"),
IsNewVisitor: q.Get("n") == "1",
@@ -70,12 +38,27 @@
Timestamp: now,
}
err := datastore.SaveRawPageview(pageview)
// find previous pageview by same visitor
previousPageview, err := datastore.GetMostRecentPageviewBySessionID(pageview.SessionID)
if err != nil && err != datastore.ErrNoResults {
return err
}
// if we have a recent pageview that is less than 30 minutes old
if previousPageview != nil && previousPageview.Timestamp.After(now.Add(-30*time.Minute)) {
previousPageview.Duration = (now.Unix() - previousPageview.Timestamp.Unix())
previousPageview.IsBounce = false
err := datastore.UpdatePageview(previousPageview)
if err != nil {
return err
}
}
// save new pageview
err = datastore.SavePageview(pageview)
if err != nil {
return err
}
// push onto channel
//pageviews <- pageview
// don't you cache this
w.Header().Set("Content-Type", "image/gif")
@@ -90,3 +73,16 @@ func NewCollectHandler() http.Handler {
return nil
})
}
// runs the aggregate func every 5 mins
func aggregate() {
counter.Aggregate()
timeout := 5 * time.Minute
for {
select {
case <-time.After(timeout):
counter.Aggregate()
}
}
}
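The aggregate goroutine above runs counter.Aggregate() once at startup and then every five minutes via a select over time.After. A time.Ticker gives the same cadence more idiomatically and reuses a single timer; a minimal equivalent sketch, not part of this commit:

func aggregate() {
	counter.Aggregate()

	ticker := time.NewTicker(5 * time.Minute)
	defer ticker.Stop()

	for range ticker.C {
		counter.Aggregate()
	}
}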


@@ -21,21 +21,25 @@ func getRequestedLimit(r *http.Request) int64 {
return limit
}
func getRequestedPeriods(r *http.Request) (int64, int64) {
var before, after int64
func getRequestedDatePeriods(r *http.Request) (time.Time, time.Time) {
var startDate, endDate time.Time
var err error
before, err = strconv.ParseInt(r.URL.Query().Get("before"), 10, 64)
if err != nil || before == 0 {
before = time.Now().Unix()
beforeUnix, err := strconv.ParseInt(r.URL.Query().Get("before"), 10, 64)
if err != nil || beforeUnix == 0 {
endDate = time.Now()
} else {
endDate = time.Unix(beforeUnix, 0)
}
after, err = strconv.ParseInt(r.URL.Query().Get("after"), 10, 64)
if err != nil || before == 0 {
after = time.Now().AddDate(0, 0, -defaultPeriod).Unix()
afterUnix, err := strconv.ParseInt(r.URL.Query().Get("after"), 10, 64)
if err != nil || afterUnix == 0 {
startDate = endDate.AddDate(0, 0, -defaultPeriod)
} else {
startDate = time.Unix(afterUnix, 0)
}
return before, after
return startDate, endDate
}
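getRequestedDatePeriods reads before/after as unix timestamps from the query string and falls back to a default window ending now. A minimal sketch of the behaviour, written as a test in the same package (TestGetRequestedDatePeriods is a hypothetical name; defaultPeriod is defined elsewhere and its value isn't shown in this hunk):

import (
	"net/http/httptest"
	"testing"
	"time"
)

func TestGetRequestedDatePeriods(t *testing.T) {
	// explicit range: unix seconds in the query string
	r := httptest.NewRequest("GET", "/api/stats/site/pageviews?after=1525132800&before=1525219200", nil)
	start, end := getRequestedDatePeriods(r)
	if end.Sub(start) != 24*time.Hour {
		t.Errorf("expected a 24h window, got %s", end.Sub(start))
	}

	// missing or zero params: end defaults to now, start to defaultPeriod days earlier
	r = httptest.NewRequest("GET", "/api/stats/site/pageviews", nil)
	start, end = getRequestedDatePeriods(r)
	if !start.Before(end) {
		t.Error("expected the default range to start before it ends")
	}
}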
func parseMajorMinor(v string) string {


@@ -12,7 +12,11 @@ func Routes(webroot string) *mux.Router {
r.Handle("/api/session", LoginHandler).Methods(http.MethodPost)
r.Handle("/api/session", LogoutHandler).Methods(http.MethodDelete)
r.Handle("/api/stats/page", Authorize(GetPageStatsHandler)).Methods(http.MethodGet)
r.Handle("/api/stats/site/pageviews", Authorize(GetSiteStatsPageviewsHandler)).Methods(http.MethodGet)
r.Handle("/api/stats/site/visitors", Authorize(GetSiteStatsVisitorsHandler)).Methods(http.MethodGet)
r.Handle("/api/stats/site/duration", Authorize(GetSiteStatsDurationHandler)).Methods(http.MethodGet)
r.Handle("/api/stats/site/bounces", Authorize(GetSiteStatsBouncesHandler)).Methods(http.MethodGet)
r.Handle("/api/stats/site/realtime", Authorize(GetSiteStatsRealtimeHandler)).Methods(http.MethodGet)
r.Path("/tracker.js").Handler(http.FileServer(http.Dir(webroot + "/js/")))
r.PathPrefix("/").Handler(http.FileServer(http.Dir(webroot)))
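Routes returns a gorilla/mux router with the new /api/stats/site/* endpoints registered behind Authorize. For reference, a minimal sketch of mounting it; the listen address and webroot path are placeholders, and the pkg/api import path is assumed from the package layout shown above:

package main

import (
	"log"
	"net/http"

	"github.com/usefathom/fathom/pkg/api"
)

func main() {
	log.Fatal(http.ListenAndServe(":8080", api.Routes("./public")))
}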


@@ -1,8 +1,10 @@
package api
import (
"github.com/usefathom/fathom/pkg/models"
"net/http"
"github.com/usefathom/fathom/pkg/datastore"
"github.com/usefathom/fathom/pkg/models"
)
// URL: /api/stats/site
@@ -16,29 +18,49 @@ var GetSiteStatsHandler = HandlerFunc(func(w http.ResponseWriter, r *http.Reques
// URL: /api/stats/site/pageviews
var GetSiteStatsPageviewsHandler = HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
// before, after := getRequestedPeriods(r)
// limit := getRequestedLimit(r)
var result int
startDate, endDate := getRequestedDatePeriods(r)
result, err := datastore.GetTotalSiteViews(startDate, endDate)
if err != nil {
return err
}
return respond(w, envelope{Data: result})
})
// URL: /api/stats/site/visitors
var GetSiteStatsVisitorsHandler = HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
// before, after := getRequestedPeriods(r)
var result int
startDate, endDate := getRequestedDatePeriods(r)
result, err := datastore.GetTotalSiteVisitors(startDate, endDate)
if err != nil {
return err
}
return respond(w, envelope{Data: result})
})
// URL: /api/stats/site/avg-duration
var GetSiteStatsAvgDurationHandler = HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
// before, after := getRequestedPeriods(r)
var result int
// URL: /api/stats/site/duration
var GetSiteStatsDurationHandler = HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
startDate, endDate := getRequestedDatePeriods(r)
result, err := datastore.GetAverageSiteDuration(startDate, endDate)
if err != nil {
return err
}
return respond(w, envelope{Data: result})
})
// URL: /api/stats/site/avg-bounce
var GetSiteStatusAvgBounceHandler = HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
// before, after := getRequestedPeriods(r)
var result int
// URL: /api/stats/site/bounces
var GetSiteStatsBouncesHandler = HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
startDate, endDate := getRequestedDatePeriods(r)
result, err := datastore.GetAverageSiteBounceRate(startDate, endDate)
if err != nil {
return err
}
return respond(w, envelope{Data: result})
})
// URL: /api/stats/site/realtime
var GetSiteStatsRealtimeHandler = HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
result, err := datastore.GetRealtimeVisitorCount()
if err != nil {
return err
}
return respond(w, envelope{Data: result})
})
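The four date-range handlers above share one shape: parse the requested period, call a datastore aggregate, wrap the result in an envelope. Since GetTotalSiteViews, GetTotalSiteVisitors, GetAverageSiteDuration and GetAverageSiteBounceRate all take (time.Time, time.Time) and return (int64, error), they could sit behind one small helper. A sketch only, not part of this commit; siteStatHandler is a hypothetical name, and it relies on the package's HandlerFunc, envelope and respond helpers shown above:

func siteStatHandler(fetch func(start, end time.Time) (int64, error)) HandlerFunc {
	return HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
		startDate, endDate := getRequestedDatePeriods(r)
		result, err := fetch(startDate, endDate)
		if err != nil {
			return err
		}
		return respond(w, envelope{Data: result})
	})
}

// usage, e.g.:
// var GetSiteStatsPageviewsHandler = siteStatHandler(datastore.GetTotalSiteViews)
// var GetSiteStatsBouncesHandler = siteStatHandler(datastore.GetAverageSiteBounceRate)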


@@ -5,12 +5,15 @@ import (
"github.com/usefathom/fathom/pkg/datastore"
"github.com/usefathom/fathom/pkg/models"
log "github.com/sirupsen/logrus"
)
func Aggregate() error {
now := time.Now()
// Get unprocessed pageviews
pageviews, err := datastore.GetRawPageviews()
pageviews, err := datastore.GetProcessablePageviews()
if err != nil && err != datastore.ErrNoResults {
return err
}
@@ -21,33 +24,75 @@ func Aggregate() error {
}
// site stats
date := time.Now()
siteStats, err := getSiteStats(date)
siteStats, err := getSiteStats(now)
if err != nil {
return err
}
pages := map[string]*models.PageStats{}
for _, p := range pageviews {
// site stats
siteStats.Pageviews += 1
if p.IsNewVisitor {
siteStats.Visitors += 1
siteStats.BouncedN += 1
// TODO: only new sessions can bounce, not just new visitors, so this is inaccurate right now.
if p.IsBounce {
siteStats.Bounced += 1
siteStats.Bounced = ((siteStats.BouncedN * siteStats.Bounced) + 1) / (siteStats.BouncedN + 1)
} else {
siteStats.Bounced = ((siteStats.BouncedN * siteStats.Bounced) + 0) / (siteStats.BouncedN + 1)
}
siteStats.BouncedN += 1
}
// TODO: duration
siteStats.AvgDuration = ((siteStats.AvgDuration * siteStats.AvgDurationN) + p.Duration) / (siteStats.AvgDurationN + 1)
siteStats.AvgDurationN += 1
// page stats
var pageStats *models.PageStats
var ok bool
if pageStats, ok = pages[p.Pathname]; !ok {
pageStats, err = getPageStats(now, p.Pathname)
if err != nil {
log.Error(err)
continue
}
pages[p.Pathname] = pageStats
}
pageStats.Views += 1
if p.IsUnique {
pageStats.UniqueViews += 1
}
pageStats.AvgDuration = ((pageStats.AvgDuration * pageStats.AvgDurationN) + p.Duration) / (pageStats.AvgDurationN + 1)
pageStats.AvgDurationN += 1
if p.IsNewVisitor {
if p.IsBounce {
pageStats.Bounced = ((pageStats.BouncedN * pageStats.Bounced) + 1) / (pageStats.BouncedN + 1)
} else {
pageStats.Bounced = ((pageStats.BouncedN * pageStats.Bounced) + 0) / (pageStats.BouncedN + 1)
}
pageStats.BouncedN += 1
}
// TODO: referrer stats
}
for _, pageStats := range pages {
err = datastore.UpdatePageStats(pageStats)
if err != nil {
log.Error(err)
return err
}
}
// TODO: page stats
// TODO: referrer stats
err = datastore.SaveSiteStats(siteStats)
err = datastore.UpdateSiteStats(siteStats)
if err != nil {
log.Error(err)
return err
}
@@ -61,11 +106,31 @@ func getSiteStats(date time.Time) (*models.SiteStats, error) {
return nil, err
}
if stats == nil {
return &models.SiteStats{
Date: date,
}, nil
if stats != nil {
return stats, nil
}
return stats, nil
stats = &models.SiteStats{
Date: date,
}
err = datastore.InsertSiteStats(stats)
return stats, err
}
func getPageStats(date time.Time, pathname string) (*models.PageStats, error) {
stats, err := datastore.GetPageStats(date, pathname)
if err != nil && err != datastore.ErrNoResults {
return nil, err
}
if stats != nil {
return stats, nil
}
stats = &models.PageStats{
Pathname: pathname,
Date: date,
}
err = datastore.InsertPageStats(stats)
return stats, err
}
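The Bounced and AvgDuration updates inside Aggregate above are incremental means: fold one sample x into a mean over n prior samples via (n*mean + x) / (n+1), then bump the counter. A small sketch of the rule with a worked example (foldMean is a hypothetical helper; the exact types of the stats fields aren't shown in this diff, and if they are integers the division truncates):

// incremental mean: avg' = (n*avg + x) / (n+1)
func foldMean(avg, n, x int64) (newAvg, newN int64) {
	return (n*avg + x) / (n + 1), n + 1
}

// worked example over bounce flags 1, 0, 1:
//   start: avg=0, n=0
//   x=1 -> (0*0 + 1) / 1 = 1, n=1
//   x=0 -> (1*1 + 0) / 2 = 0, n=2   (0.5 with float math; integer division truncates)
//   x=1 -> (2*0 + 1) / 3 = 0, n=3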


@@ -38,7 +38,7 @@ func getDSN(driver string, host string, name string, user string, password strin
case "postgres":
dsn = fmt.Sprintf("host=%s user=%s password=%s dbname=%s", host, user, password, name)
case "mysql":
dsn = fmt.Sprintf("%s:%s@%s/%s?parseTime=true", user, password, host, name)
dsn = fmt.Sprintf("%s:%s@%s/%s?parseTime=true&loc=Local", user, password, host, name)
}
return dsn


@@ -1,4 +1,5 @@
-- +migrate Up
ALTER TABLE pageviews DROP COLUMN time_on_page;
ALTER TABLE pageviews ADD COLUMN time_on_page INT(4) DEFAULT 0;


@@ -1 +1,33 @@
package datastore
import (
"database/sql"
"github.com/usefathom/fathom/pkg/models"
"time"
)
const sqlSelectPageStat = `SELECT * FROM daily_page_stats WHERE date = ? AND pathname = ? LIMIT 1`
const sqlInsertPageStats = `INSERT INTO daily_page_stats(views, unique_views, bounced, bounced_n, avg_duration, avg_duration_n, pathname, date) VALUES(?, ?, ?, ?, ?, ?, ?, ?)`
const sqlUpdatePageStats = `UPDATE daily_page_stats SET views = ?, unique_views = ?, bounced = ?, bounced_n = ?, avg_duration = ?, avg_duration_n = ? WHERE pathname = ? AND date = ?`
func GetPageStats(date time.Time, pathname string) (*models.PageStats, error) {
stats := &models.PageStats{}
query := dbx.Rebind(sqlSelectPageStat)
err := dbx.Get(stats, query, date.Format("2006-01-02"), pathname)
if err != nil && err == sql.ErrNoRows {
return nil, ErrNoResults
}
return stats, err
}
func InsertPageStats(s *models.PageStats) error {
query := dbx.Rebind(sqlInsertPageStats)
_, err := dbx.Exec(query, s.Views, s.UniqueViews, s.Bounced, s.BouncedN, s.AvgDuration, s.AvgDurationN, s.Pathname, s.Date.Format("2006-01-02"))
return err
}
func UpdatePageStats(s *models.PageStats) error {
query := dbx.Rebind(sqlUpdatePageStats)
_, err := dbx.Exec(query, s.Views, s.UniqueViews, s.Bounced, s.BouncedN, s.AvgDuration, s.AvgDurationN, s.Pathname, s.Date.Format("2006-01-02"))
return err
}


@@ -1,76 +1,38 @@
package datastore
/*
import (
"database/sql"
"github.com/usefathom/fathom/pkg/models"
"time"
)
const sqlInsertPageview = `INSERT INTO raw_pageviews(session_id, pathname, is_new_visitor, is_unique, is_bounce, referrer, duration, timestamp) VALUES(?, ?, ?, ?, ?, ?, ?, ?)`
const sqlUpdatePageview = `UPDATE raw_pageviews SET is_bounce = ?, duration = ? WHERE id = ?`
const sqlSelectProcessablePageviews = `SELECT * FROM raw_pageviews WHERE ( duration > 0 OR timestamp < ? ) AND timestamp < ? LIMIT 500`
const sqlSelectMostRecentPageviewBySessionID = `SELECT * FROM raw_pageviews WHERE session_id = ? ORDER BY id DESC LIMIT 1`
// SavePageview inserts a single pageview model into the connected database
func SavePageview(pv *models.Pageview) error {
query := dbx.Rebind(`INSERT INTO pageviews(page_id, visitor_id, referrer_url, referrer_keyword, bounced, time_on_page, timestamp) VALUES( ?, ?, ?, ?, ?, ?, ?)`)
result, err := dbx.Exec(query, pv.PageID, pv.VisitorID, pv.ReferrerUrl, pv.ReferrerKeyword, pv.Bounced, pv.TimeOnPage, pv.Timestamp)
func SavePageview(p *models.Pageview) error {
query := dbx.Rebind(sqlInsertPageview)
result, err := dbx.Exec(query, p.SessionID, p.Pathname, p.IsNewVisitor, p.IsUnique, p.IsBounce, p.Referrer, p.Duration, p.Timestamp)
if err != nil {
return err
}
pv.ID, _ = result.LastInsertId()
p.ID, _ = result.LastInsertId()
return nil
}
// SavePageviews inserts multiple pageview models into the connected database using a transaction
func SavePageviews(pvs []*models.Pageview) error {
query := dbx.Rebind(`INSERT INTO pageviews(page_id, visitor_id, referrer_url, referrer_keyword, bounced, time_on_page, timestamp ) VALUES( ?, ?, ?, ?, ?, ?, ? )`)
tx, err := dbx.Begin()
if err != nil {
return err
}
stmt, err := tx.Prepare(query)
if err != nil {
return err
}
defer stmt.Close()
for _, pv := range pvs {
result, err := stmt.Exec(pv.PageID, pv.VisitorID, pv.ReferrerUrl, pv.ReferrerKeyword, pv.Bounced, pv.TimeOnPage, pv.Timestamp)
if err != nil {
return err
}
pv.ID, err = result.LastInsertId()
}
err = tx.Commit()
return err
}
// UpdatePageview updates an existing pageview
func UpdatePageview(p *models.Pageview) error {
query := dbx.Rebind(`UPDATE pageviews SET bounced = ?, time_on_page = ? WHERE id = ?`)
_, err := dbx.Exec(query, p.Bounced, p.TimeOnPage, p.ID)
query := dbx.Rebind(sqlUpdatePageview)
_, err := dbx.Exec(query, p.IsBounce, p.Duration, p.ID)
return err
}
// GetPageview retrieves a pageview by its ID
func GetPageview(ID int64) (*models.Pageview, error) {
p := &models.Pageview{}
query := dbx.Rebind(`SELECT * FROM pageviews WHERE id = ? LIMIT 1`)
err := dbx.Get(p, query, ID)
if err != nil {
return nil, ErrNoResults
}
return p, nil
}
func GetLastPageviewForVisitor(visitorID int64) (*models.Pageview, error) {
p := &models.Pageview{}
query := dbx.Rebind(`SELECT * FROM pageviews WHERE visitor_id = ? ORDER BY id DESC LIMIT 1`)
err := dbx.Get(p, query, visitorID)
func GetMostRecentPageviewBySessionID(sessionID string) (*models.Pageview, error) {
result := &models.Pageview{}
query := dbx.Rebind(sqlSelectMostRecentPageviewBySessionID)
err := dbx.Get(result, query, sessionID)
if err != nil {
if err == sql.ErrNoRows {
@@ -80,21 +42,14 @@ func GetLastPageviewForVisitor(visitorID int64) (*models.Pageview, error) {
return nil, err
}
return p, nil
return result, nil
}
func PageviewCountPerPageAndDay(before string, after string) ([]*models.Total, error) {
query := dbx.Rebind(`SELECT
pv.page_id,
COUNT(*) AS count,
COUNT(DISTINCT(pv.visitor_id)) AS count_unique,
DATE_FORMAT(pv.timestamp, '%Y-%m-%d') AS date_group
FROM pageviews pv
WHERE pv.timestamp < ? AND pv.timestamp > ?
GROUP BY pv.page_id, date_group`)
var results []*models.Total
err := dbx.Select(&results, query, before, after)
func GetProcessablePageviews() ([]*models.Pageview, error) {
var results []*models.Pageview
thirtyMinsAgo := time.Now().Add(-30 * time.Minute)
fiveMinsAgo := time.Now().Add(-5 * time.Minute)
query := dbx.Rebind(sqlSelectProcessablePageviews)
err := dbx.Select(&results, query, thirtyMinsAgo, fiveMinsAgo)
return results, err
}
*/
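GetProcessablePageviews pairs with the 30-minute session window in the collect handler: a raw pageview is ready to aggregate once it is at least five minutes old and either already has a duration (a follow-up pageview from the same session closed it) or is older than thirty minutes, so no follow-up can still update it. The two cutoffs bound to the query, for reference:

// bound parameters for sqlSelectProcessablePageviews
thirtyMinsAgo := time.Now().Add(-30 * time.Minute) // past the session window: nothing will update these anymore
fiveMinsAgo := time.Now().Add(-5 * time.Minute)    // leave the most recent hits out of this batch
// WHERE ( duration > 0 OR timestamp < thirtyMinsAgo ) AND timestamp < fiveMinsAgo LIMIT 500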


@@ -1,32 +0,0 @@
package datastore
import (
"github.com/usefathom/fathom/pkg/models"
)
const sqlInsertRawPageview = `INSERT INTO raw_pageviews(session_id, pathname, is_new_visitor, is_unique, is_bounce, referrer, duration, timestamp) VALUES(?, ?, ?, ?, ?, ?, ?, ?)`
const sqlSelectRawPageviews = `SELECT * FROM raw_pageviews`
// SaveRawPageview inserts a single pageview model into the connected database
func SaveRawPageview(p *models.RawPageview) error {
query := dbx.Rebind(sqlInsertRawPageview)
result, err := dbx.Exec(query, p.SessionID, p.Pathname, p.IsNewVisitor, p.IsUnique, p.IsBounce, p.Referrer, p.Duration, p.Timestamp)
if err != nil {
return err
}
p.ID, _ = result.LastInsertId()
return nil
}
// SaveRawPageviews inserts multiple pageviews
func SaveRawPageviews(p []*models.RawPageview) error {
return nil // TODO: Implement this method
}
func GetRawPageviews() ([]*models.RawPageview, error) {
var results []*models.RawPageview
query := dbx.Rebind(sqlSelectRawPageviews)
err := dbx.Select(&results, query)
return results, err
}


@@ -8,28 +8,66 @@ import (
const sqlSelectSiteStat = `SELECT * FROM daily_site_stats WHERE date = ? LIMIT 1`
const sqlInsertSiteStats = `INSERT INTO daily_site_stats(visitors, pageviews, bounced, bounced_n, avg_duration, avg_duration_n, date) VALUES(?, ?, ?, ?, ?, ?, ?)`
const sqlUpdateSiteStats = `UPDATE daily_site_stats SET visitors = ?, pageviews = ?, bounced = ?, bounced_n = ?, avg_duration = ?, avg_duration_n = ? WHERE date = ?`
/*
visitors INT NOT NULL,
pageviews INT NOT NULL,
bounced INT NOT NULL,
bounced_n INT NOT NULL,
avg_duration INT NOT NULL,
avg_duration_n INT NOT NULL,
date DATE NOT NULL
*/
func GetSiteStats(date time.Time) (*models.SiteStats, error) {
stats := &models.SiteStats{}
query := dbx.Rebind(sqlSelectSiteStat)
err := dbx.Get(stats, query, date)
err := dbx.Get(stats, query, date.Format("2006-01-02"))
if err != nil && err == sql.ErrNoRows {
return nil, ErrNoResults
}
return stats, err
}
func SaveSiteStats(s *models.SiteStats) error {
func InsertSiteStats(s *models.SiteStats) error {
query := dbx.Rebind(sqlInsertSiteStats)
_, err := dbx.Exec(query, s.Visitors, s.Pageviews, s.Bounced, s.BouncedN, s.AvgDuration, s.AvgDurationN, s.Date)
_, err := dbx.Exec(query, s.Visitors, s.Pageviews, s.Bounced, s.BouncedN, s.AvgDuration, s.AvgDurationN, s.Date.Format("2006-01-02"))
return err
}
func UpdateSiteStats(s *models.SiteStats) error {
query := dbx.Rebind(sqlUpdateSiteStats)
_, err := dbx.Exec(query, s.Visitors, s.Pageviews, s.Bounced, s.BouncedN, s.AvgDuration, s.AvgDurationN, s.Date.Format("2006-01-02"))
return err
}
func GetTotalSiteViews(startDate time.Time, endDate time.Time) (int64, error) {
sql := `SELECT COALESCE(SUM(pageviews), 0) FROM daily_site_stats WHERE date >= ? AND date <= ?`
query := dbx.Rebind(sql)
var total int64
err := dbx.Get(&total, query, startDate.Format("2006-01-02"), endDate.Format("2006-01-02"))
return total, err
}
func GetTotalSiteVisitors(startDate time.Time, endDate time.Time) (int64, error) {
sql := `SELECT COALESCE(SUM(visitors), 0) FROM daily_site_stats WHERE date >= ? AND date <= ?`
query := dbx.Rebind(sql)
var total int64
err := dbx.Get(&total, query, startDate.Format("2006-01-02"), endDate.Format("2006-01-02"))
return total, err
}
func GetAverageSiteDuration(startDate time.Time, endDate time.Time) (int64, error) {
sql := `SELECT COALESCE(ROUND(AVG(avg_duration), 0), 0) FROM daily_site_stats WHERE date >= ? AND date <= ?`
query := dbx.Rebind(sql)
var total int64
err := dbx.Get(&total, query, startDate.Format("2006-01-02"), endDate.Format("2006-01-02"))
return total, err
}
func GetAverageSiteBounceRate(startDate time.Time, endDate time.Time) (int64, error) {
sql := `SELECT COALESCE(ROUND(AVG(bounced), 0), 0) FROM daily_site_stats WHERE date >= ? AND date <= ?`
query := dbx.Rebind(sql)
var total int64
err := dbx.Get(&total, query, startDate.Format("2006-01-02"), endDate.Format("2006-01-02"))
return total, err
}
func GetRealtimeVisitorCount() (int64, error) {
sql := `SELECT COUNT(DISTINCT(session_id)) FROM raw_pageviews WHERE timestamp > ?`
query := dbx.Rebind(sql)
var total int64
err := dbx.Get(&total, query, time.Now().Add(-5*time.Minute))
return total, err
}


@@ -5,12 +5,13 @@ import (
)
type Pageview struct {
ID int64 `db:"id"`
PageID int64 `db:"page_id"`
VisitorID int64 `db:"visitor_id"`
Bounced bool `db:"bounced"`
ReferrerKeyword string `db:"referrer_keyword"`
ReferrerUrl string `db:"referrer_url"`
TimeOnPage int64 `db:"time_on_page"`
Timestamp time.Time `db:"timestamp"`
ID int64 `db:"id"`
SessionID string `db:"session_id"`
Pathname string `db:"pathname"`
IsNewVisitor bool `db:"is_new_visitor"`
IsUnique bool `db:"is_unique"`
IsBounce bool `db:"is_bounce"`
Referrer string `db:"referrer"`
Duration int64 `db:"duration"`
Timestamp time.Time `db:"timestamp"`
}


@@ -1,17 +0,0 @@
package models
import (
"time"
)
type RawPageview struct {
ID int64 `db:"id"`
SessionID string `db:"session_id"`
Pathname string `db:"pathname"`
IsNewVisitor bool `db:"is_new_visitor"`
IsUnique bool `db:"is_unique"`
IsBounce bool `db:"is_bounce"`
Referrer string `db:"referrer"`
Duration int64 `db:"duration"`
Timestamp time.Time `db:"timestamp"`
}