Commit dea02ef9 authored by Ivanov, Ivan (UG - Computer Science)

Merge branch 'implementation' into 'master'

Added README; testing works, as well as a Makefile for easier use.

See merge request !2
parents d35db8ca d96be26b
LICENSE 0 → 100644
MIT License
Copyright (c) 2021 Ivanov, Ivan (UG - Computer Science)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
build:
	go get
run:
	go run . $(file)
test:
	go test
\ No newline at end of file
# json-utility
# JSON Utility
## Usage
```
$ make build
$ make run file=example.json # where example.json is the file to process
```
## Testing
```
$ make test
```
## Dependencies
- You need to have `GOPATH` exported (and `GOBIN` on macOS) for the `go get` command to work.
\ No newline at end of file
package main
import (
"encoding/json"
"log"
"os"
"sync"
)
// Handles opens the JSON file and calls ExtractJSON to parse
// the data. It returns the result as a struct.
func Handles(argument string) (urls *URLs, err error) {
jsonFile, err := os.Open(argument)
if err != nil {
log.Println(err)
return nil, err
}
log.Println("Successfully Opened", argument)
//Close when done.
defer jsonFile.Close()
result := ExtractJSON(jsonFile)
return result, err
}
// PrintResult logs the number of unique visits per date for every URL.
func PrintResult(result *URLs) {
for k, v := range result.URL {
log.Println("For URL: ", k)
for m, x := range v.Date {
log.Println("↳", m, "there are:", x.Counter, "unique visits.")
}
}
}
// ExtractJSON decodes the JSON file into a slice of loads and processes
// every GET request concurrently, aggregating unique visits per URL and date.
func ExtractJSON(file *os.File) (urls *URLs) {
d := json.NewDecoder(file)
var loads []Load
visited := URLs{URL: make(map[string]*Dates)}
err := d.Decode(&loads)
if err != nil {
log.Panic(err)
}
wg := new(sync.WaitGroup)
for _, load := range loads {
if load.TypeRequest != "GET" {
log.Println("Request is invalid: ", load.TypeRequest)
continue
}
wg.Add(1)
go Process(&visited, load, wg)
}
wg.Wait()
return &visited
}
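
For orientation, here is a minimal sketch of how the two functions above are meant to be used together (it mirrors what `main` does further down; `exampleUsage` is a hypothetical name and the path is just one of the test fixtures):

```
// Minimal usage sketch, assuming the Handles and PrintResult functions defined above.
func exampleUsage() {
	// The path points at one of the test fixtures shipped with the repo.
	result, err := Handles("test/test-websites.json")
	if err != nil {
		log.Fatal(err)
	}
	// Logs the number of unique visits per date for every URL in the file.
	PrintResult(result)
}
```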
package main
import (
"fmt"
"log"
"testing"
)
/*
Standard assert Equals function
*/
func assertEqual(t *testing.T, a interface{}, b interface{}, message string) {
if a == b {
return
}
if len(message) == 0 {
message = fmt.Sprintf("%v != %v", a, b)
}
t.Fatal(message)
}
/*
Tests with a sample JSON file that visited websites are recorded.
*/
func TestWebsites(t *testing.T) {
result, err := Handles("test/test-websites.json")
if err != nil {
t.Fatal(err)
}
assertEqual(t, len(result.URL), 1, "")
}
/*
Tests that a user who visits one URL multiple times, but on
different days, is still recorded under a single URL entry.
*/
func TestWebsites2(t *testing.T) {
result, err := Handles("test/test-websites2.json")
if err != nil {
t.Fatal(err)
}
assertEqual(t, len(result.URL), 1, "")
}
/*
Tests that each date gets added. Basic case: one user visits one page on
two different dates, so two dates should be returned. Also checks that POST
requests get dropped.
*/
func TestDates(t *testing.T) {
result, err := Handles("test/test-dates.json")
if err != nil {
t.Fatal(err)
}
assertEqual(t, len(result.URL["http://www.someamazingwebsite.com/1"].Date), 2, "")
}
/*
Tests that each date gets added. Advanced case: three users visit two pages on
three different dates; two dates should be returned for each website.
*/
func TestDates2(t *testing.T) {
result, err := Handles("test/test-dates2.json")
if err != nil {
t.Fatal(err)
}
assertEqual(t, len(result.URL["http://www.someamazingwebsite.com/1"].Date), 2, "")
assertEqual(t, len(result.URL["http://www.someamazingwebsite.com/2"].Date), 2, "")
}
File added
/*
Example payload:
userid: A unique ID representing the user
url : The URL the visitor has accessed
type: The HTTP method used to access the URL
timestamp: The timestamp for when the action occurred
*/
// Package main implements a JSON parser utility that processes a stream of JSON
// messages and calculates the number of unique viewers per day for each URL in the stream.
package main
import (
@@ -15,23 +10,23 @@ import (
type Load struct {
Userid string `json:"userid"`
Url string `json:"url"`
URL string `json:"url"`
TypeRequest string `json:"type"`
Timestamp int64 `json:"timestamp"`
}
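// Illustrative decoding sketch (an assumption for clarity, not taken from the
// repo): a single payload object like the first fixture entry below unmarshals
// into Load through the json struct tags, e.g.
//
//	var l Load
//	err := json.Unmarshal([]byte(`{"userid":"user1","url":"http://www.someamazingwebsite.com/1","type":"GET","timestamp":1360662163000}`), &l)
//	// l.Userid == "user1", l.URL == "http://www.someamazingwebsite.com/1",
//	// l.TypeRequest == "GET", l.Timestamp == 1360662163000, err == nil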
type URLs struct {
URL map[string]*Dates
mux sync.RWMutex
URL map[string]*Dates //map holding all the dates on which a certain URL was visited
mux sync.RWMutex //mutex guarding the map against concurrent access
}
type Dates struct {
Exists map[string]*Visits
Date map[string]*Visits //map holding all visits for a certain date
}
type Visits struct {
mux sync.RWMutex
Counter int
UserIds map[string]bool
mux sync.RWMutex //mutex guarding the counter and user IDs against concurrent access
Counter int //counter for unique visits
UserIds map[string]bool //set of user IDs that have already been counted
}
func main() {
@@ -40,27 +35,12 @@ func main() {
log.Println("USAGE: ./json-utility [path/to/file]")
return
}
openFile(os.Args[1])
}
func openFile(argument string) {
jsonFile, err := os.Open(argument)
//Get the result from a file.
result, err := Handles(os.Args[1])
if err != nil {
log.Println(err)
return
}
log.Println("Successfully Opened", argument)
//Close when done.
defer jsonFile.Close()
result := handler(jsonFile)
for k, v := range result.URL {
log.Println("For URL: ", k)
for m, x := range v.Exists {
log.Println("↳", m, "there are:", x.Counter, "unique visits.")
}
}
//Print the result
PrintResult(result)
}
package main
import (
"log"
"sync"
"time"
)
// Process handles a single load: it records the load's visit under its URL and date in the shared URLs structure.
func Process(visited *URLs, load Load, wg *sync.WaitGroup) {
//Notify main that this routine is done.
defer wg.Done()
//Time is accepted both in epoch milliseconds and in epoch seconds: a value
//larger than the current time in seconds cannot be a past seconds timestamp,
//so it is treated as milliseconds (e.g. 1360662163000 ms -> 2013-02-12).
var t string
if load.Timestamp > time.Now().Unix() {
t = time.Unix(load.Timestamp/1000, 0).Format("2006-01-02")
} else {
t = time.Unix(load.Timestamp, 0).Format("2006-01-02")
}
visited.mux.Lock()
if date, ok := visited.URL[load.URL]; ok {
log.Println("URL exists.")
if visit, ok := date.Date[t]; ok {
log.Println("Date exists.")
if visit.UserIds[load.Userid] {
visited.mux.Unlock()
return //Return as the user has already visited that website for this day once.
} else {
visit.mux.Lock()
visit.Counter += 1
visit.UserIds[load.Userid] = true
visited.mux.Unlock()
visit.mux.Unlock()
return
}
} else {
//Add the Date and a Visit.
entry := Visits{Counter: 0, UserIds: make(map[string]bool)}
entry.mux.Lock()
entry.Counter += 1
entry.UserIds[load.Userid] = true
entry.mux.Unlock()
visited.URL[load.URL].Date[t] = &entry
visited.mux.Unlock()
log.Println("Date and Visit are added.")
return
}
} else {
log.Println("URL doesn't exist.")
//Initialising URL, date and visits for that date.
entry := Visits{Counter: 0, UserIds: make(map[string]bool)}
entry.mux.Lock()
dates := Dates{Date: make(map[string]*Visits)}
entry.Counter += 1
entry.UserIds[load.Userid] = true
entry.mux.Unlock()
dates.Date[t] = &entry
visited.URL[load.URL] = &dates
visited.mux.Unlock()
return
}
}
[
{
"userid": "user1",
"url": "http://www.someamazingwebsite.com/1",
"type": "GET",
"timestamp": 1360662163000
},
{
"userid": "user2",
"url": "http://www.someamazingwebsite.com/1",
"type": "GET",
"timestamp": 1160165163000
}
]
\ No newline at end of file
[
{
"userid": "user1",
"url": "http://www.someamazingwebsite.com/1",
"type": "GET",
"timestamp": 1360662163000
},
{
"userid": "user2",
"url": "http://www.someamazingwebsite.com/3",
"type": "GET",
"timestamp": 1160165163000
},
{
"userid": "user3",
"url": "http://www.someamazingwebsite.com/1",
"type": "GET",
"timestamp": 1358122163000
},
{
"userid": "user1",
"url": "http://www.someamazingwebsite.com/2",
"type": "GET",
"timestamp": 1360662163000
},
{
"userid": "user2",
"url": "http://www.someamazingwebsite.com/2",
"type": "GET",
"timestamp": 1160165163000
},
{
"userid": "user3",
"url": "http://www.someamazingwebsite.com/2",
"type": "GET",
"timestamp": 1358122163000
}
]
\ No newline at end of file
[
{
"userid": "user1",
"url": "http://www.someamazingwebsite.com/1",
"type": "POST",
"timestamp": 1360662163000
},
{
"userid": "user2",
"url": "http://www.someamazingwebsite.com/1",
"type": "GET",
"timestamp": 1360662163000
}
]
\ No newline at end of file
[
{
"userid": "user2",
"url": "http://www.someamazingwebsite.com/1",
"type": "GET",
"timestamp": 1360662163000
},
{
"userid": "user2",
"url": "http://www.someamazingwebsite.com/1",
"type": "GET",
"timestamp": 1360662163000
},
{
"userid": "user2",
"url": "http://www.someamazingwebsite.com/1",
"type": "GET",
"timestamp": 1310666163000
}
]
\ No newline at end of file