moving manager over to the engine for now

This commit is contained in:
2020-07-01 20:23:22 -04:00
parent 07bbb442ef
commit 6379c73e38
11 changed files with 219 additions and 544 deletions

31
common/engine/commits.go Normal file
View File

@@ -0,0 +1,31 @@
package engine
import (
"time"
"github.com/deranjer/gvc/common/database"
)
// CreateInitialCommit copies the files over and compresses them if they are not in the NoCompress struct.
// It hashes the full tracked-file list together with the commit message and
// records the result as commit #1 on master.
// BUG FIX: the original launched one goroutine per file against the loop
// variable's address (pre-Go 1.22 all goroutines could observe the same
// element), never waited for them, dropped their errors, and passed an
// undefined identifier `folder`. Files are now converted synchronously and
// the first failure is returned.
func (m *Manager) CreateInitialCommit(fileList []database.File, commitMessage string) error { // ONLY HAPPENS FOR MASTER I THINK, SO NO BRANCH NEEDED
	//Need to deduplicate so we aren't storing duplicates of files, storing all the files in one folder won't work, will need something like git
	//For initial commit no changes are made to files, so don't store anything, just save the list so you can send to server
	var initialCommit database.Commit
	initialCommit.Branch = "master"
	hashBytes, err := CreateCommitHash(fileList, commitMessage)
	if err != nil {
		return err
	}
	initialCommit.CommitHash = hashBytes
	initialCommit.Number = 1
	initialCommit.TrackedFiles = fileList
	initialCommit.Date = time.Now().String()
	for i := range fileList {
		// m.SyncFolder comes from the embedded *FilePaths ("where file
		// originals live") — TODO confirm this is the intended destination;
		// the original passed an undefined `folder` here.
		if err := ConvertFileForStorage(&fileList[i], m.SyncFolder); err != nil {
			return err
		}
	}
	// TODO(review): initialCommit is built but never persisted to the
	// database — confirm where it should be saved.
	//var hashList [][]byte
	return nil
}

View File

@@ -2,7 +2,12 @@ package engine
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"io/ioutil"
"time"
"github.com/deranjer/gvc/common/database"
)
// UniqueFileHash uses SHA256 to create a hash of the file
@@ -16,3 +21,23 @@ func UniqueFileHash(src string) ([]byte, error) {
hash := hasher.Sum(nil)
return hash, nil
}
// CreateCommitHash creates a hash of all the files and time and commit message.
// Every tracked file's content hash is folded into a single SHA256 digest,
// followed by the commit message and the current time (which makes each
// commit hash unique even for identical file sets).
func CreateCommitHash(fileList []database.File, commitMessage string) (hash []byte, err error) {
	hasher := sha256.New()
	for _, file := range fileList {
		// CalculateHash populates file.Hash (on this loop copy) from disk.
		if err := file.CalculateHash(); err != nil {
			return nil, fmt.Errorf("unable to calculate hash for file: %s with error: %w", file.Path, err)
		}
		hasher.Write(file.Hash[:])
	}
	// `now` instead of the original `time := time.Now()`, which shadowed the
	// time package for the rest of the function.
	now := time.Now()
	hasher.Write([]byte(commitMessage + now.String()))
	hashBytes := hasher.Sum(nil) // Getting the hash bytes
	fmt.Println("Commit hash: ", hex.EncodeToString(hashBytes))
	return hashBytes, nil
}

View File

@@ -3,13 +3,11 @@ package engine
import (
"bytes"
"compress/gzip"
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"fmt"
"io/ioutil"
"os"
"strings"
"time"
"github.com/deranjer/gvc/common/database"
)
@@ -91,30 +89,14 @@ func InitiateDirectory(directory string) {
}
}
// CreateInitialCommit copies the files over and compresses them if they are not in the NoCompress struct
func CreateInitialCommit(fileList []database.File, commitMessage string) error { // ONLY HAPPENS FOR MASTER I THINK, SO NO BRANCH NEEDED
//Need to deduplicate so we aren't storing duplicates of files, storing all the files in one folder won't work, will need something like git
//For initial commit no changes are made to files, so don't store anything, just save the list so you can send to server
var initialCommit database.Commit
initialCommit.Branch = "master"
//var hashList [][]byte
hasher := sha256.New()
for _, file := range fileList {
var err error
err = file.CalculateHash()
if err != nil {
return fmt.Errorf("unable to calculate hash for file: %s with error: %s", file.Path, err)
}
hasher.Write(file.Hash[:])
func ConvertFileForStorage(file *database.File, folder string) error {
fileBytes, err := ioutil.ReadFile(file.Path)
if err != nil {
return err
}
time := time.Now() // Adding the metadata to the hash
hasher.Write([]byte(commitMessage + time.String()))
hashBytes := hasher.Sum(nil) // Getting the hash bytes
fullHash := hex.EncodeToString(hashBytes)
fmt.Println("Commit hash: ", fullHash)
initialCommit.CommitHash = hashBytes
fmt.Println("REMOVE: ", fileBytes)
return nil
}
func IsDirectory(path string) (bool, error) {

218
common/engine/manager.go Normal file
View File

@@ -0,0 +1,218 @@
package engine
import (
"bytes"
"encoding/hex"
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
"time"
"github.com/deranjer/gvc/common/database"
"github.com/rs/zerolog"
"golang.org/x/net/context"
)
// NewManager creates a new manager interface that contains all the needed information to make changes to the repo
// rootPath is passed by client or server to let the manager know where to look for the .gvc folder and all the components needed
func NewManager(rootDir string, version string, dbPath string, informer chan OperatingMessage, dirPaths *FilePaths, log *zerolog.Logger) (*Manager, error) {
	log.Info().Msg("Creating new Manager...")
	// Create new patcher
	patcher := Patcher{
		Logger:         log,
		KeyFolder:      dirPaths.KeyFolder,
		DownloadFolder: dirPaths.DownloadFolder,
		SyncFolder:     dirPaths.SyncFolder,
		ThumbFolder:    dirPaths.ThumbFolder,
		DiffFolder:     dirPaths.ObjectFolder,
	}
	gvcDB, err := database.OpenOrCreateDB(dbPath, log)
	if err != nil {
		// Return the error rather than log.Fatal: a constructor that already
		// returns error should let the caller decide whether to exit.
		log.Error().Msgf("unable to create or open db: %s", err)
		return nil, err
	}
	// BUG FIX: the original declared `var wg *sync.WaitGroup` — a nil
	// pointer — so any later m.WaitGroup.Add/Wait would panic. Allocate a
	// real WaitGroup instead.
	m := Manager{
		version,
		//settings,
		log,
		&sync.WaitGroup{},
		patcher,
		gvcDB,
		informer,
		dirPaths,
	}
	return &m, nil
}
// CheckPaths just checks the .gvc folder structure.
// It resolves rootDir to an absolute path, computes the full path of every
// .gvc subfolder, and makes sure each one exists on disk.
func CheckPaths(rootDir string) (filePaths *FilePaths, err error) {
	// checking for the .gvc folder (the client (but not the server) already checks for the .gvc folder, but this checks all subdirects to make sure they are there)
	rootFolder, err := filepath.Abs(rootDir)
	if err != nil {
		return &FilePaths{}, err
	}
	// filepath.Join builds the OS-correct path instead of hand-gluing the
	// separator as the original did.
	path := filepath.Join(rootFolder, ".gvc")
	fullFilePaths := FilePaths{
		KeyFolder:      filepath.Join(path, "keys"),      // private and public keys
		DownloadFolder: filepath.Join(path, "downloads"), // where downloaded files start
		SyncFolder:     filepath.Join(path, "sync"),      // where file originals live
		ObjectFolder:   filepath.Join(path, "objects"),   // where patches and last versions live
		ThumbFolder:    filepath.Join(path, "thumb"),     // where the thumbnails are stored
		LogFolder:      filepath.Join(path, "logs"),      // where the logs are stored
		PluginFolder:   filepath.Join(path, "plugins"),   // where plugins are stored
	}
	// Create any folder that is missing (replaces seven copy-pasted calls).
	for _, dir := range []string{
		fullFilePaths.KeyFolder,
		fullFilePaths.DownloadFolder,
		fullFilePaths.SyncFolder,
		fullFilePaths.ObjectFolder,
		fullFilePaths.ThumbFolder,
		fullFilePaths.LogFolder,
		fullFilePaths.PluginFolder,
	} {
		InitiateDirectory(dir)
	}
	return &fullFilePaths, nil
}
// AddFileToRepo adds a file for the watcher to keep an eye on; the file will
// also need to be backed up and added to the database.
// This changes all paths to absolute paths rather than relative.
// When adding a file to monitor, this should check if the database
// is already expecting to monitor this file. If it is this function should
// do checks to make sure that it is successfully monitoring it, and that there
// is a historical breadcrumb trail to recreate all the versions that the database
// claims to have a copy of.
func (m *Manager) AddFileToRepo(relFilePath string) error {
	//TODO: what needs to happen is a channel for errors/progress is created,
	// then pass that channel to a routine so errors and progress stream back
	// to the caller, with the final result on a third channel — see
	// commsManagment.go.
	//DELAYED: this feature affects only large files and user experience. It can wait.
	relFilePath = strings.TrimSpace(relFilePath) //purging any odd spaces TODO: Make sure not needed
	filename := filepath.Base(relFilePath)
	//check that the file actually exists (currently done by client/server)
	//generate a unique file name from the hash and the moment it was created
	//a sampled (and therefore) fast, hash of the file for 'uniqueness'
	hash, err := UniqueFileHash(relFilePath)
	if err != nil {
		return err
	}
	if m.dB.CheckIfFileCurrentlyMonitored(relFilePath) {
		return fmt.Errorf("file already found in tracked files, not adding: %s", relFilePath)
	}
	tmpFile := database.File{
		Hash:      hash,
		Name:      filename,
		Path:      relFilePath,
		CreatedAt: time.Now(),
	}
	//we should now have a unique name for this file; if needs be, we can find
	//out the real file name from the string, and the hash gives a reasonable
	//indication of the similarity of the files
	tmpFile.Unique = hex.EncodeToString([]byte(filename)) + "_" + hex.EncodeToString(tmpFile.Hash) + "_" + strconv.FormatInt(tmpFile.CreatedAt.Unix(), 10) + "_" + filename
	//tmpFile.BkpLocation = filepath.Join(m.SyncFolder, tmpFile.Unique)
	//tmpFile.CurrentBase = tmpFile.BkpLocation
	if _, err = m.prepareDatabaseForFile(tmpFile); err != nil {
		return err
	}
	// BUG FIX: the original logged tmpFile.CreatedAt.String — a method
	// value, not its result — and printed the raw hash bytes with %s.
	m.Info().Msgf("added file: %s at path: %s with hash: %x at time: %s", filename, relFilePath, tmpFile.Hash, tmpFile.CreatedAt.String())
	return nil
}
// prepareDatabaseForFile is responsible for keeping all references to the version of the file,
// the diff and the metadata of the diffs. Before any file is copied and stored, it should be managed by the database
//
// TODO: This will need to initialise a diff object in the database, currently created by the diff package,
// however going forward a diff maybe defined by the manager.
func (m *Manager) prepareDatabaseForFile(tmpFile database.File) (int, error) {
	fileID, err := m.dB.InitializeFileInDatabase(tmpFile)
	if err != nil {
		// BUG FIX: the old message claimed we were "checking if file is
		// monitored" — this call initializes the file in the database.
		m.Error().Msgf("Error initializing file [%s] in database. Error %s", tmpFile.Path, err)
		return 0, err
	}
	return fileID, nil
}
// BeginCommit compares every tracked file's current content hash against the
// stored hash, collects the files that changed, and — when no previous commit
// exists on the branch — creates the initial commit from them.
func (m *Manager) BeginCommit(branch string, commitMessage string) error {
	trackedFiles, err := m.FetchTrackedFiles()
	if err != nil {
		return err
	}
	var filesToDiff []database.File // Contains the list of files that have changed
	for _, trackedFile := range trackedFiles {
		currentFile, err := os.Stat(trackedFile.Path)
		if err != nil {
			// BUG FIX: the original called currentFile.Name() here, but
			// currentFile is nil when os.Stat fails — use the tracked path.
			fmt.Printf("unable to stat tracked file: %s error: %s\n", trackedFile.Path, err)
			continue
		}
		currentFileHash, err := UniqueFileHash(trackedFile.Path)
		if err != nil {
			fmt.Printf("unable to create hash for file: %s error: %s\n", currentFile.Name(), err)
			continue
		}
		// Identical hashes mean the file is unchanged since it was tracked.
		if bytes.Equal(currentFileHash, trackedFile.Hash) {
			fmt.Printf("No changes found in file: %s when compared to file: %s\n", currentFile.Name(), trackedFile.Name)
			continue
		}
		filesToDiff = append(filesToDiff, trackedFile)
	}
	// NOTE(review): the original declared an unused diff channel and context
	// (a compile error in Go) and called m.WaitGroup.Add(2) with no matching
	// Done calls — removed until the diff pipeline is actually wired up.
	if _, err := m.dB.FetchLastCommitOnBranch(branch); err != nil {
		// BUG FIX: Msgf was passed err with no format verb, and the free
		// CreateInitialCommit was replaced by the method on Manager.
		m.Info().Msgf("unable to fetch last commit on branch, assuming first commit on branch: %s", err)
		if err := m.CreateInitialCommit(filesToDiff, commitMessage); err != nil {
			m.Err(err).Msgf("unable to create initial commit: %s", err)
			return err
		}
	}
	return nil
}
// FetchCommitByNumber looks up a commit on the given branch by its number.
// TODO: not yet implemented — currently a stub that always returns nil.
func (m *Manager) FetchCommitByNumber(branch string, commitNumber string) error {
	return nil
}
// FetchTrackedFiles just grabs all the files currently tracked in the repo.
func (m *Manager) FetchTrackedFiles() ([]database.File, error) {
	files, err := m.dB.RetrieveTrackedFiles()
	if err != nil {
		// %w keeps the underlying error inspectable with errors.Is/As.
		return nil, fmt.Errorf("unable to retrieve tracked files: %w", err)
	}
	return files, nil
}

View File

@@ -1,8 +1,14 @@
package engine
import (
"os/user"
"sync"
"time"
watcher "github.com/radovskyb/watcher"
"github.com/rs/zerolog"
database "github.com/deranjer/gvc/common/database"
)
type FileWatcher struct {
@@ -25,3 +31,124 @@ type Patcher struct {
ThumbFolder string
DiffFolder string
}
// Manager ties together everything the engine needs to operate on a repo:
// logging, background-work synchronization, the patcher, the database
// handle, the informer channel back to the caller, and the resolved .gvc
// folder paths.
type Manager struct {
	Version string //What version of the client or server are we using
	//Settings *UserSettings
	*zerolog.Logger // embedded so m.Info()/m.Err() log directly
	*sync.WaitGroup // embedded so m.Add()/m.Wait() track background work
	//watcher engine.FileWatcher
	patcher Patcher // creates/applies diffs between file versions
	dB *database.DB // repo database handle
	Informer chan OperatingMessage // status/progress messages for the front end
	*FilePaths // resolved .gvc subfolder paths (see CheckPaths)
	//ProgressCommunicator io.WriteCloser
}
// CustomPlugin is the contract a plugin must satisfy to be loaded by the
// engine: an initializer plus a human-readable name and description.
type CustomPlugin interface {
	Init() // called to initialize the plugin
	Name() string // short plugin name
	Description() string // longer human-readable description
}
//FilePaths holds the full paths to all the relevant folders
type FilePaths struct {
	KeyFolder string // private and public keys
	DownloadFolder string // where downloaded files start
	SyncFolder string // where file originals live
	ThumbFolder string // where the thumbnails are stored
	ObjectFolder string // where patches and last versions live
	LogFolder string // where the logs are stored
	PluginFolder string // where plugins are stored
}
// type PluginManager struct {
// engine *qml.QQmlApplicationEngine
// informer chan OperatingMessage
// path string
// plugins []string
// }
// UserSettings holds per-user client configuration.
// NOTE(review): field semantics are inferred from the names only — no usage
// is visible in this file; confirm against callers before relying on them.
type UserSettings struct {
	Usr user.User // the OS user the client runs as
	versionFormat string
	darkMode bool
	licenseKey string
	override bool
	machineID string
	//systemSettings engine.UXSettings
}
// VersioningFormat describes the components that make up a version string:
// a big/little/micro version triple plus commit metadata.
// NOTE(review): field meanings inferred from names — no usage visible here;
// confirm with callers.
type VersioningFormat struct {
	bigVersion int64
	littleVersion int64
	microVersion int64
	currentTime time.Time
	client string
	job string
	userId string
	owner string
	hash string
	message string
}
// this should enumerate certain message types that the front end can retrieve
// over a channel. the manager will output certain message types at certain times.

// OpCode is a type that is used to describe what type
// of event has occurred during the management process.
type OpCode uint32

// OperatingMessage pairs an OpCode with its default text and an optional
// custom field supplied by the sender.
type OperatingMessage struct {
	Code        OpCode
	data        string
	CustomField string
}

// Custom returns the message's CustomField when one was supplied, otherwise
// the default text for the code.
func (op *OperatingMessage) Custom() string {
	result := op.data
	if op.CustomField != "" {
		result = op.CustomField
	}
	return result
}

// Ops — the event codes the manager can emit.
const (
	OpNewDiff OpCode = iota
	OpNewFile
	OpNewBase
	OpWatchCommencing
	OpWatchStopped
	OpMessage
	OpEnablingPlugin
	OpPluginEnabled
	OpPluginError
	OpNone
)

// ops maps each code to its canonical OperatingMessage.
var ops = map[OpCode]OperatingMessage{
	OpNewDiff: {OpNewDiff, "New diff created", ""},
	OpNewFile: {OpNewFile, "New file created", ""},
	OpNewBase: {OpNewBase, "New base created", ""},
	//OpWatchCommencing: {Op_WatchCommencing, "File watching has started", ""},
	//OpWatchStopped: {Op_WatchStopped, "File watching has stopped", ""},
	OpMessage:        {OpMessage, "Custom message attached - ", ""},
	OpEnablingPlugin: {OpEnablingPlugin, "Enabling Plugin - ", ""},
	OpPluginEnabled:  {OpPluginEnabled, "Plugin Enabled", ""},
	OpPluginError:    {OpPluginError, "Error enabling plugin", ""},
	OpNone:           {OpNone, "No error code known", ""},
}

// String prints the string version of the Op consts
func (e OpCode) String() string {
	op, known := ops[e]
	if !known {
		return "???"
	}
	return op.data
}

// Retrieve returns the registered OperatingMessage for the code, falling
// back to the OpNone entry when the code is unknown.
func (e OpCode) Retrieve() OperatingMessage {
	if op, known := ops[e]; known {
		return op
	}
	return ops[OpNone]
}