Working on pulling settings from a config file using Viper; finished the basic RSS feed and refresh cron job

This commit is contained in:
2018-01-10 20:07:00 -05:00
parent 08b3a14576
commit f079a5f067
18 changed files with 30486 additions and 28934 deletions

View File

@@ -1,46 +0,0 @@
package main
import (
"fmt"
"github.com/anacrolix/torrent"
"time"
)
// ClientError formats errors coming from the client.
type ClientError struct {
Type string
Origin error
}
func (clientError ClientError) Error() string {
return fmt.Sprintf("Error %s: %s\n", clientError.Type, clientError.Origin)
}
type ClientConfig struct {
TorrentPath string
Port int
TorrentPort int
Seed bool
TCP bool
MaxConnections int
DownloadDir string
}
type Client struct {
Client *torrent.Client
Torrent *torrent.Torrent
Name string
Progress int64
Status string
Seeds int
Peers int
DownloadSpeed int64
UploadSpeed int64
ETA time.Duration
Ratio int
Avail int
Config ClientConfig
}

View File

@@ -1,59 +1,101 @@
[serverConfig]
ServerPort: 8000
ServerAddr: "" #blank will bind to localhost
[torrentClientConfig]
DownloadDir = "downloads" #the full OR relative path of the default download directory for torrents
#The address to listen for new uTP and TCP bittorrent protocol connections. DHT shares a UDP socket with uTP unless configured otherwise.
ListenAddr = "" #Leave Blank for default, syntax "HOST:PORT"
#Don't announce to trackers. This only leaves DHT to discover peers.
DisableTrackers = false #boolean
DisablePEX = false # boolean
# Don't create a DHT.
NoDHT = false #boolean
# Never send chunks to peers.
NoUpload = false #boolean
#seed after download
Seed = true #boolean
# Events are data bytes sent in pieces. The burst must be large enough to fit a whole chunk.
UploadRateLimiter = "" #*rate.Limiter
#The events are bytes read from connections. The burst must be bigger than the largest Read performed on a Conn minus one. This is likely to
#be the larger of the main read loop buffer (~4096), and the requested chunk size (~16KiB).
DownloadRateLimiter = "" #*rate.Limiter
#User-provided Client peer ID. If not present, one is generated automatically.
PeerID = "" #string
#For the bittorrent protocol.
DisableUTP = false #bool
#For the bittorrent protocol.
DisableTCP = false #bool
#Called to instantiate storage for each added torrent. Builtin backends
# are in the storage package. If not set, the "file" implementation is used.
DefaultStorage = "storage.ClientImpl"
#encryption policy
IPBlocklist = "" #of type iplist.Ranger
DisableIPv6 = false #boolean
Debug = false #boolean
#HTTP *http.Client
HTTPUserAgent = "" # HTTPUserAgent changes default UserAgent for HTTP requests
ExtendedHandshakeClientVersion = ""
Bep20 = ""
# Overrides the default DHT configuration, see dhtServerConfig #advanced.. so be careful
DHTConfig = "" # default is "dht.ServerConfig"
[EncryptionPolicy]
DisableEncryption = false
ForceEncryption = false
PreferNoEncryption = true
[dhtServerConfig]
# Set NodeId Manually. Caller must ensure that if NodeId does not conform to DHT Security Extensions, that NoSecurity is also set.
NodeId = "" #[20]byte
Conn = "" # https://godoc.org/net#PacketConn #not implemented
# Don't respond to queries from other nodes.
Passive = false # boolean
# the default addresses are "router.utorrent.com:6881","router.bittorrent.com:6881","dht.transmissionbt.com:6881","dht.aelitis.com:6881",
#https://github.com/anacrolix/dht/blob/master/dht.go
StartingNodes = "dht.GlobalBootstrapAddrs"
#Disable the DHT security extension: http://www.libtorrent.org/dht_sec.html.
NoSecurity = false
#Initial IP blocklist to use. Applied before serving and bootstrapping begins.
IPBlocklist = "" #of type iplist.Ranger
#Used to secure the server's ID. Defaults to the Conn's LocalAddr(). Set to the IP that remote nodes will see,
#as that IP is what they'll use to validate our ID.
PublicIP = "" #net.IP
#Hook received queries. Return true if you don't want to propagate to the default handlers.
OnQuery = "func(query *krpc.Msg, source net.Addr) (propagate bool)"
#Called when a peer successfully announces to us.
OnAnnouncePeer = "func(infoHash metainfo.Hash, peer Peer)"
#How long to wait before resending queries that haven't received a response. Defaults to a random value between 4.5 and 5.5s.
QueryResendDelay = "func() time.Duration"
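The UploadRateLimiter and DownloadRateLimiter entries above are placeholders for golang.org/x/time/rate limiters. As a rough, hypothetical Go sketch (the rates and bursts are invented for illustration, not values this commit sets), limiters that respect the burst notes in the comments could look like this:

package main

import "golang.org/x/time/rate"

//exampleLimiters builds an upload and a download limiter; the 512 KiB/s and 2 MiB/s rates are invented.
func exampleLimiters() (upload, download *rate.Limiter) {
	//Upload: the burst (256 KiB) easily fits a whole ~16 KiB chunk, per the comment above.
	upload = rate.NewLimiter(rate.Limit(512*1024), 256*1024)
	//Download: the burst must be bigger than the largest single Read (~16 KiB chunk / ~4096 byte loop buffer).
	download = rate.NewLimiter(rate.Limit(2*1024*1024), 1024*1024)
	return upload, download
}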

View File

@@ -17,6 +17,19 @@ type Message struct {
//Next are the messages the server sends to the client
//RSSJSONList is a slice of gofeed.Feeds sent to the client
type RSSJSONList struct {
MessageType string
TotalRSSFeeds int
RSSFeeds []RSSFeedsNames //strings of the full rss feed
}
//RSSFeedsNames stores all of the feeds by name and with URL
type RSSFeedsNames struct {
RSSName string
RSSFeedURL string
}
//TorrentList struct contains the torrent list that is sent to the client
type TorrentList struct { //helps create the JSON structure that react expects to receive
MessageType string `json:"MessageType"`
@@ -74,4 +87,6 @@ type ClientDB struct { //TODO maybe seperate out the internal bits into another
DataBytesRead int64 //Internal used for calculating dl speed
UpdatedAt time.Time //Internal used for calculating speeds of upload and download
TorrentHash metainfo.Hash //Used to create string for TorrentHashString... not sure why I have it... make that a TODO I guess
NumberofFiles int
NumberofPieces int
}
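For orientation only, a minimal sketch (not part of this commit) of what the new RSSJSONList message looks like once marshalled; "rssListRequest" is the MessageType main.go sends for the rssFeedRequest case, and the feed name/URL are invented:

package engine

import (
	"encoding/json"
	"fmt"
)

//exampleRSSListJSON marshals an RSSJSONList so the wire format the React client reads is visible.
func exampleRSSListJSON() {
	payload := RSSJSONList{
		MessageType:   "rssListRequest",
		TotalRSSFeeds: 1,
		RSSFeeds:      []RSSFeedsNames{{RSSName: "Example Feed", RSSFeedURL: "https://example.com/rss"}}, //invented feed
	}
	out, _ := json.Marshal(payload)
	fmt.Println(string(out)) //{"MessageType":"rssListRequest","TotalRSSFeeds":1,"RSSFeeds":[{"RSSName":"Example Feed","RSSFeedURL":"https://example.com/rss"}]}
}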

48
engine/cronJobs.go Normal file
View File

@@ -0,0 +1,48 @@
package engine
import (
"fmt"
"github.com/asdine/storm"
Storage "github.com/deranjer/goTorrent/storage"
"github.com/mmcdole/gofeed"
"github.com/robfig/cron"
)
//InitializeCronEngine initializes and starts the cron engine so we can add tasks as needed, returns pointer to the engine
func InitializeCronEngine() *cron.Cron { //TODO add a cron to inspect cron jobs and log the outputs
c := cron.New()
c.Start()
return c
}
//RefreshRSSCron refreshes all of the RSS feeds on an hourly basis
func RefreshRSSCron(c *cron.Cron, db *storm.DB) {
c.AddFunc("@hourly", func() {
RSSFeedStore := Storage.FetchRSSFeeds(db)
singleRSSTorrent := Storage.SingleRSSTorrent{}
newFeedStore := Storage.RSSFeedStore{ID: RSSFeedStore.ID} //creating a new feed store just using old one to parse for new torrents
fp := gofeed.NewParser()
for _, singleFeed := range RSSFeedStore.RSSFeeds {
feed, err := fp.ParseURL(singleFeed.URL)
if err != nil {
fmt.Println("Unable to parse URL", singleFeed.URL, err)
}
for _, RSSTorrent := range feed.Items {
singleRSSTorrent.Link = RSSTorrent.Link
singleRSSTorrent.Title = RSSTorrent.Title
singleRSSTorrent.PubDate = RSSTorrent.Published
singleFeed.Torrents = append(singleFeed.Torrents, singleRSSTorrent)
}
newFeedStore.RSSFeeds = append(newFeedStore.RSSFeeds, singleFeed)
}
Storage.UpdateRSSFeeds(db, newFeedStore) //Calling this to fully update storage will all rss feeds
})
}
//LogCronStatus prints out the status of the cron jobs to the log
func LogCronStatus(c *cron.Cron) {
}
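RefreshRSSCron hard-codes the "@hourly" spec. As a hypothetical sketch only (the serverConfig.RSSRefreshInterval key does not exist in this commit), the interval could be made configurable through viper, since robfig/cron also accepts specs such as "@every 30m":

package engine

import (
	"fmt"

	"github.com/robfig/cron"
	"github.com/spf13/viper"
)

//exampleConfigurableCron is a sketch of making the RSS refresh interval configurable;
//the "serverConfig.RSSRefreshInterval" key is hypothetical and not read anywhere in this commit.
func exampleConfigurableCron(c *cron.Cron) {
	interval := viper.GetString("serverConfig.RSSRefreshInterval")
	if interval == "" {
		interval = "@hourly" //fall back to the schedule RefreshRSSCron uses above
	}
	err := c.AddFunc(interval, func() {
		fmt.Println("refreshing RSS feeds...")
	})
	if err != nil {
		fmt.Println("Invalid cron spec", interval, err)
	}
}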

View File

@@ -2,6 +2,7 @@ package engine //main file for all the calculations and data gathering needed fo
import (
"fmt"
"io/ioutil"
"os"
"strconv"
"strings"
@@ -11,9 +12,58 @@ import (
"github.com/anacrolix/torrent/metainfo"
"github.com/asdine/storm"
Storage "github.com/deranjer/goTorrent/storage"
"github.com/mmcdole/gofeed"
)
//RefreshSingleRSSFeed refreshing a single RSS feed to send to the client (so no updating database) mainly by updating the torrent list to display any changes
func RefreshSingleRSSFeed(db *storm.DB, RSSFeed Storage.SingleRSSFeed) Storage.SingleRSSFeed { //Todo.. duplicate as cron job... any way to merge these to reduce duplication?
singleRSSFeed := Storage.SingleRSSFeed{URL: RSSFeed.URL, Name: RSSFeed.Name}
singleRSSTorrent := Storage.SingleRSSTorrent{}
fp := gofeed.NewParser()
feed, err := fp.ParseURL(RSSFeed.URL)
if err != nil {
fmt.Println("Unable to parse URL", RSSFeed.URL, err)
}
for _, RSSTorrent := range feed.Items {
singleRSSTorrent.Link = RSSTorrent.Link
singleRSSTorrent.Title = RSSTorrent.Title
singleRSSTorrent.PubDate = RSSTorrent.Published
singleRSSFeed.Torrents = append(singleRSSFeed.Torrents, singleRSSTorrent)
}
return singleRSSFeed
}
//ForceRSSRefresh forces a refresh (in addition to the cron schedule) to add the new RSS feed
func ForceRSSRefresh(db *storm.DB, RSSFeedStore Storage.RSSFeedStore) { //Todo.. duplicate as cron job... any way to merge these to reduce duplication?
singleRSSTorrent := Storage.SingleRSSTorrent{}
newFeedStore := Storage.RSSFeedStore{ID: RSSFeedStore.ID} //creating a new feed store just using old one to parse for new torrents
fp := gofeed.NewParser()
fmt.Println("Length of RSS feeds (should be ONE)", len(RSSFeedStore.RSSFeeds))
for _, singleFeed := range RSSFeedStore.RSSFeeds {
feed, err := fp.ParseURL(singleFeed.URL)
if err != nil {
fmt.Println("Unable to parse URL", singleFeed.URL, err)
}
fmt.Println("SingleFeed is: ", singleFeed)
for _, RSSTorrent := range feed.Items {
singleRSSTorrent.Link = RSSTorrent.Link
singleRSSTorrent.Title = RSSTorrent.Title
singleRSSTorrent.PubDate = RSSTorrent.Published
singleFeed.Torrents = append(singleFeed.Torrents, singleRSSTorrent)
}
newFeedStore.RSSFeeds = append(newFeedStore.RSSFeeds, singleFeed)
}
fmt.Println("ABOUT TO WRITE TO DB", newFeedStore.RSSFeeds)
Storage.UpdateRSSFeeds(db, newFeedStore) //Calling this to fully update storage will all rss feeds
}
//timeOutInfo forcing a timeout of the torrent if it doesn't load from program restart
func timeOutInfo(clientTorrent *torrent.Torrent, seconds time.Duration) (deleted bool) {
fmt.Println("Attempting to pull information for torrent... ", clientTorrent.Name())
timeout := make(chan bool, 1) //creating a timeout channel for our gotinfo
go func() {
time.Sleep(seconds * time.Second)
@@ -21,11 +71,11 @@ func timeOutInfo(clientTorrent *torrent.Torrent, seconds time.Duration) (deleted
}()
select {
case <-clientTorrent.GotInfo(): //attempting to retrieve info for torrent
//fmt.Println("Recieved torrent info for...", clientTorrent.Name())
clientTorrent.DownloadAll()
return false
case <-timeout: // getting info for torrent has timed out so purging the torrent
fmt.Println("Dropping Torrent from information timeout...", clientTorrent.Name())
clientTorrent.Drop()
return true
}
@@ -33,23 +83,37 @@ func timeOutInfo(clientTorrent *torrent.Torrent, seconds time.Duration) (deleted
}
//StartTorrent creates the storage.db entry and starts A NEW TORRENT and adds to the running torrent array
func StartTorrent(clientTorrent *torrent.Torrent, torrentLocalStorage Storage.TorrentLocal, torrentDbStorage *storm.DB, dataDir string, torrentType string, torrentFileName string) {
timeOutInfo(clientTorrent, 45) //seeing if adding the torrent times out (giving 45 seconds)
var TempHash metainfo.Hash
TempHash = clientTorrent.InfoHash()
fmt.Println(clientTorrent.Info().Source)
torrentLocalStorage.Hash = TempHash.String() // we will store the infohash to add it back later on client restart (if needed)
torrentLocalStorage.InfoBytes = clientTorrent.Metainfo().InfoBytes
torrentLocalStorage.DateAdded = time.Now().Format("Jan _2 2006")
torrentLocalStorage.StoragePath = dataDir //TODO check full path information for torrent storage
torrentLocalStorage.TorrentName = clientTorrent.Name()
torrentLocalStorage.TorrentStatus = "Running" //by default start all the torrents as downloading.
torrentLocalStorage.TorrentType = torrentType //either "file" or "magnet" maybe more in the future
if torrentType == "file" { //if it is a file read the entire file into the database for us to spit out later
torrentLocalStorage.TorrentFileName = torrentFileName
torrentfile, err := ioutil.ReadFile(torrentFileName)
if err != nil {
fmt.Println("Unable to read the torrent file...")
}
torrentLocalStorage.TorrentFile = torrentfile //storing the entire file in to database
}
torrentFiles := clientTorrent.Files() //storing all of the files in the database along with the priority
var TorrentFilePriorityArray = []Storage.TorrentFilePriority{}
for _, singleFile := range torrentFiles { //creating the database setup for the file array
var torrentFilePriority = Storage.TorrentFilePriority{}
torrentFilePriority.TorrentFilePath = singleFile.DisplayPath()
torrentFilePriority.TorrentFilePriority = "Normal"
TorrentFilePriorityArray = append(TorrentFilePriorityArray, torrentFilePriority)
}
torrentLocalStorage.TorrentFilePriority = TorrentFilePriorityArray
fmt.Printf("%+v\n", torrentLocalStorage)
Storage.AddTorrentLocalStorage(torrentDbStorage, torrentLocalStorage) //writing all of the data to the database
clientTorrent.DownloadAll() //starting the download
@@ -64,12 +128,27 @@ func CreateRunningTorrentArray(tclient *torrent.Client, TorrentLocalArray []*Sto
if element.TorrentType == "file" { //if it is a file pull it from the uploaded torrent folder if element.TorrentType == "file" { //if it is a file pull it from the uploaded torrent folder
//fmt.Println("Filename", element.TorrentFileName) //fmt.Println("Filename", element.TorrentFileName)
tempFile, err := ioutil.TempFile("", "TorrentFileTemp")
if err != nil {
fmt.Println("Unable to create a temp file for adding file torrent in", err)
}
defer os.Remove(tempFile.Name())
if _, err := tempFile.Write(element.TorrentFile); err != nil {
fmt.Println("Unable to write to the temp file...", err)
}
if err := tempFile.Close(); err != nil {
fmt.Println("Error closing Temp file", err)
}
singleTorrent, _ = tclient.AddTorrentFromFile(tempFile.Name())
if _, err := os.Stat(element.TorrentFileName); err == nil { //if we CAN find the torrent, add it if _, err := os.Stat(element.TorrentFileName); err == nil { //if we CAN find the torrent, add it
//fmt.Println("Adding file name...", element.TorrentFileName) //fmt.Println("Adding file name...", element.TorrentFileName)
singleTorrent, _ = tclient.AddTorrentFromFile(element.TorrentFileName) singleTorrent, _ = tclient.AddTorrentFromFile(element.TorrentFileName)
} else { //if we cant find the torrent delete it } else { //if we cant find the torrent delete it
fmt.Println("File Error", err) fmt.Println("File Error", err)
Storage.DelTorrentLocalStorage(db, element) Storage.DelTorrentLocalStorage(db, element.Hash)
continue continue
} }
@@ -77,11 +156,17 @@ func CreateRunningTorrentArray(tclient *torrent.Client, TorrentLocalArray []*Sto
elementMagnet := "magnet:?xt=urn:btih:" + element.Hash //For magnet links just need to prepend the magnet part to the hash to readd elementMagnet := "magnet:?xt=urn:btih:" + element.Hash //For magnet links just need to prepend the magnet part to the hash to readd
singleTorrent, _ = tclient.AddMagnet(elementMagnet) singleTorrent, _ = tclient.AddMagnet(elementMagnet)
} }
var TempHash metainfo.Hash
TempHash = singleTorrent.InfoHash()
timeOut := timeOutInfo(singleTorrent, 45) singleTorrentStorageInfo := Storage.FetchTorrentFromStorage(db, TempHash.String())
singleTorrent.SetInfoBytes(singleTorrentStorageInfo.InfoBytes) //setting the infobytes back into the torrent
/* timeOut := timeOutInfo(singleTorrent, 45) //Shouldn't need this anymore as we pull in the infohash from the database
if timeOut == true { // if we did timeout then drop the torrent from the boltdb database if timeOut == true { // if we did timeout then drop the torrent from the boltdb database
Storage.DelTorrentLocalStorage(db, element) //purging torrent from the local database Storage.DelTorrentLocalStorage(db, element.Hash) //purging torrent from the local database
} continue
} */
fullClientDB := new(ClientDB) fullClientDB := new(ClientDB)
fullStruct := singleTorrent.Stats() fullStruct := singleTorrent.Stats()
@@ -97,10 +182,7 @@ func CreateRunningTorrentArray(tclient *torrent.Client, TorrentLocalArray []*Sto
}
activePeersString := strconv.Itoa(fullStruct.ActivePeers) //converting to strings
totalPeersString := fmt.Sprintf("%v", fullStruct.TotalPeers)
//fetching all the info from the database
var torrentTypeTemp string
torrentTypeTemp = singleTorrentStorageInfo.TorrentType //either "file" or "magnet" maybe more in the future
if torrentTypeTemp == "file" {
@@ -110,8 +192,8 @@ func CreateRunningTorrentArray(tclient *torrent.Client, TorrentLocalArray []*Sto
}
fullClientDB.StoragePath = singleTorrentStorageInfo.StoragePath //grabbed from database
downloadedSizeHumanized := HumanizeBytes(float32(singleTorrent.BytesCompleted())) //convert size to GB if needed
totalSizeHumanized := HumanizeBytes(float32(singleTorrent.Length()))
//grabbed from torrent client
fullClientDB.DownloadedSize = downloadedSizeHumanized
@@ -127,6 +209,7 @@ func CreateRunningTorrentArray(tclient *torrent.Client, TorrentLocalArray []*Sto
fullClientDB.TorrentName = element.TorrentName
fullClientDB.DateAdded = element.DateAdded
fullClientDB.BytesCompleted = singleTorrent.BytesCompleted()
fullClientDB.NumberofFiles = len(singleTorrent.Files())
CalculateTorrentETA(singleTorrent, fullClientDB) //calculating the ETA for the torrent
fullClientDB.TotalUploadedBytes = singleTorrentStorageInfo.UploadedBytes
@@ -139,7 +222,11 @@ func CreateRunningTorrentArray(tclient *torrent.Client, TorrentLocalArray []*Sto
tickUpdateStruct.Hash = fullClientDB.TorrentHashString //needed for index
Storage.UpdateStorageTick(db, tickUpdateStruct)
if singleTorrentStorageInfo.TorrentStatus != "Stopped" { //if the torrent is not stopped, try to discern the status of the torrent
CalculateTorrentStatus(singleTorrent, fullClientDB) //calculate the status of the torrent, ie downloading seeding etc
} else {
fullClientDB.Status = "Stopped"
}
RunningTorrentArray = append(RunningTorrentArray, *fullClientDB)
@@ -156,12 +243,20 @@ func CreateFileListArray(tclient *torrent.Client, selectedHash string) TorrentFi
tempHash := singleTorrent.InfoHash().String()
if tempHash == selectedHash { // if our selection hash equals our torrent hash
torrentFilesRaw := singleTorrent.Files()
fmt.Println(torrentFilesRaw)
for _, singleFile := range torrentFilesRaw {
TorrentFileStruct.TorrentHashString = tempHash
TorrentFileStruct.FileName = singleFile.DisplayPath()
TorrentFileStruct.FilePath = singleFile.Path()
PieceState := singleFile.State()
var downloadedBytes int64
for _, piece := range PieceState {
if piece.Complete {
downloadedBytes = downloadedBytes + piece.Bytes //adding up the bytes in the completed pieces
}
}
TorrentFileStruct.FilePercent = fmt.Sprintf("%.2f", float32(downloadedBytes)/float32(singleFile.Length()))
TorrentFileStruct.FilePriority = "Normal" //TODO, figure out how to store this per file in storage and also tie a priority to a file
TorrentFileStruct.FileSize = HumanizeBytes(float32(singleFile.Length()))
TorrentFileListSelected.FileList = append(TorrentFileListSelected.FileList, TorrentFileStruct)
}
@@ -205,7 +300,6 @@ func CreateTorrentDetailJSON(tclient *torrent.Client, selectedHash string, torre
if tempHash == selectedHash {
fmt.Println("CreateTorrentDetail", localTorrentInfo)
return TorrentDetailStruct
}
}
return TorrentDetailStruct

View File

@@ -1,32 +1,109 @@
package engine
import (
"fmt"
"golang.org/x/time/rate"
"github.com/anacrolix/dht"
"github.com/anacrolix/torrent"
"github.com/spf13/viper"
)
//FullClientSettings struct is a struct that can be read into anacrolix/torrent to setup a torrent client
type FullClientSettings struct {
Version int
TorrentConfig torrent.Config
TFileUploadFolder string
}
func defaultConfig() FullClientSettings {
var Config FullClientSettings
Config.Version = 1.0
Config.TorrentConfig.DataDir = "downloads" //the full OR relative path of the default download directory for torrents
Config.TFileUploadFolder = "uploadedTorrents"
Config.TorrentConfig.Seed = true
Config.TorrentConfig.DHTConfig = dht.ServerConfig{
StartingNodes: dht.GlobalBootstrapAddrs,
}
return Config
}
func dhtServerSettings(dhtConfig dht.ServerConfig) dht.ServerConfig {
viper.UnmarshalKey("DHTConfig", &dhtConfig)
fmt.Println("dhtconfig", dhtConfig)
return dhtConfig
}
//FullClientSettingsNew reads config.toml via viper and builds the torrent client configuration, falling back to defaults if the file cannot be read
func FullClientSettingsNew() FullClientSettings {
viper.SetConfigName("config")
viper.AddConfigPath("./")
err := viper.ReadInConfig()
if err != nil {
fmt.Println("Error reading in config, using defaults", err)
FullClientSettings := defaultConfig()
return FullClientSettings
}
dataDir := viper.GetString("torrentClientConfig.DownloadDir")
listenAddr := viper.GetString("torrentClientConfig.ListenAddr")
disablePex := viper.GetBool("torrentClientConfig.DisablePEX")
noDHT := viper.GetBool("torrentClientConfig.NoDHT")
noUpload := viper.GetBool("torrentClientConfig.NoUpload")
seed := viper.GetBool("torrentClientConfig.Seed")
peerID := viper.GetString("torrentClientConfig.PeerID")
disableUTP := viper.GetBool("torrentClientConfig.DisableUTP")
disableTCP := viper.GetBool("torrentClientConfig.DisableTCP")
disableIPv6 := viper.GetBool("torrentClientConfig.DisableIPv6")
debug := viper.GetBool("torrentClientConfig.Debug")
dhtServerConfig := dht.ServerConfig{
StartingNodes: dht.GlobalBootstrapAddrs,
}
if viper.IsSet("DHTConfig") {
fmt.Println("Reading in custom DHT config")
dhtServerConfig = dhtServerSettings(dhtServerConfig)
}
uploadRateLimiter := new(rate.Limiter)
viper.UnmarshalKey("UploadRateLimiter", &uploadRateLimiter)
downloadRateLimiter := new(rate.Limiter)
viper.UnmarshalKey("DownloadRateLimiter", &downloadRateLimiter)
preferNoEncryption := viper.GetBool("EncryptionPolicy.PreferNoEncryption")
fmt.Println("Encryption", preferNoEncryption)
encryptionPolicy := torrent.EncryptionPolicy{
DisableEncryption: viper.GetBool("EncryptionPolicy.DisableEncryption"),
ForceEncryption: viper.GetBool("EncryptionPolicy.ForceEncryption"),
PreferNoEncryption: viper.GetBool("EncryptionPolicy.PreferNoEncryption"),
}
tConfig := torrent.Config{
DataDir: dataDir,
ListenAddr: listenAddr,
DisablePEX: disablePex,
NoDHT: noDHT,
DHTConfig: dhtServerConfig,
NoUpload: noUpload,
Seed: seed,
//UploadRateLimiter: uploadRateLimiter,
//DownloadRateLimiter: downloadRateLimiter,
PeerID: peerID,
DisableUTP: disableUTP,
DisableTCP: disableTCP,
DisableIPv6: disableIPv6,
Debug: debug,
EncryptionPolicy: encryptionPolicy,
}
Config := FullClientSettings{TorrentConfig: tConfig, TFileUploadFolder: "uploadedTorrents"}
return Config
}
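A throwaway sketch (not part of the commit) of a sanity check for the fallback path: both defaultConfig and the config.toml path set TFileUploadFolder to "uploadedTorrents", so this should hold whether or not a config file is present.

package engine

import "testing"

//TestDefaultUploadFolder is only an illustrative sketch of the expected fallback behaviour.
func TestDefaultUploadFolder(t *testing.T) {
	cfg := FullClientSettingsNew()
	if cfg.TFileUploadFolder != "uploadedTorrents" {
		t.Errorf("expected uploadedTorrents, got %s", cfg.TFileUploadFolder)
	}
}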

202
main.go
View File

@@ -13,13 +13,22 @@ import (
"github.com/anacrolix/torrent" "github.com/anacrolix/torrent"
"github.com/asdine/storm" "github.com/asdine/storm"
//"github.com/boltdb/bolt"
Engine "github.com/deranjer/goTorrent/engine" Engine "github.com/deranjer/goTorrent/engine"
Storage "github.com/deranjer/goTorrent/storage" Storage "github.com/deranjer/goTorrent/storage"
"github.com/gorilla/mux" "github.com/gorilla/mux"
"github.com/gorilla/websocket" "github.com/gorilla/websocket"
"github.com/mmcdole/gofeed"
) )
//SingleRSSFeedMessage will most likley be deprecated as this is the only way I could get it working currently
type SingleRSSFeedMessage struct { //TODO had issues with getting this to work with Storage or Engine
MessageType string
URL string //the URL of the individual RSS feed
Name string
TotalTorrents int
Torrents []Storage.SingleRSSTorrent //name of the torrentss
}
var ( var (
httpAddr = flag.String("addr", ":8000", "Http server address") httpAddr = flag.String("addr", ":8000", "Http server address")
baseTmpl string = "templates/base.tmpl" baseTmpl string = "templates/base.tmpl"
@@ -43,14 +52,12 @@ func updateClient(torrentstats []Engine.ClientDB, conn *websocket.Conn) { //get
}
func main() {
//setting up the torrent client
Config := Engine.FullClientSettingsNew() //grabbing from settings.go
os.Mkdir(Config.TFileUploadFolder, os.ModeDir) //creating a directory to store uploaded torrent files
torrentLocalStorage := Storage.TorrentLocal{} //creating a new struct that stores all of our local storage info
fmt.Printf("%+v\n", Config)
tclient, err := torrent.NewClient(&Config.TorrentConfig) //pulling out the torrent specific config to use
if err != nil {
log.Fatalf("error creating client: %s", err)
}
@@ -61,14 +68,21 @@ func main() {
}
defer db.Close() //defering closing the database until the program closes
cronEngine := Engine.InitializeCronEngine() //Starting the cron engine for tasks
Engine.RefreshRSSCron(cronEngine, db) // Refreshing the RSS feeds on an hourly basis
var TorrentLocalArray = []*Storage.TorrentLocal{} //this is an array of ALL of the local storage torrents, they will be added back in via hash
var RunningTorrentArray = []Engine.ClientDB{} //this stores ALL of the torrents that are running, used for client update pushes combines Local Storage and Running tclient info
var PreviousTorrentArray = []Engine.ClientDB{}
TorrentLocalArray = Storage.ReadInTorrents(db) //pulling in all the already added torrents
if TorrentLocalArray != nil { //the first creation of the running torrent array //since we are adding all of them in we use a coroutine... just allows the web ui to load then it will load in the torrents
go func() { //TODO instead of running all torrent fetches in coroutine see if possible to run each single one in a routine so we don't wait for ALL of them to be verified
RunningTorrentArray = Engine.CreateRunningTorrentArray(tclient, TorrentLocalArray, PreviousTorrentArray, Config, db)
}()
//RunningTorrentArray = Engine.CreateRunningTorrentArray(tclient, TorrentLocalArray, PreviousTorrentArray, Config, db) //Updates the RunningTorrentArray with the current client data as well
} else {
fmt.Println("Database is empty!")
@@ -98,7 +112,7 @@ func main() {
fmt.Println("Error adding Torrent from file: ", fileName.Name()) fmt.Println("Error adding Torrent from file: ", fileName.Name())
} else { } else {
fmt.Println("Adding Torrent via file", fileName) fmt.Println("Adding Torrent via file", fileName)
Engine.StartTorrent(clientTorrent, torrentLocalStorage, db, Config.DataDir, "file", fileName.Name()) Engine.StartTorrent(clientTorrent, torrentLocalStorage, db, Config.TorrentConfig.DataDir, "file", fileName.Name()) // the starttorrent can take a LONG time on startup
} }
}) })
@@ -121,12 +135,14 @@ func main() {
log.Println("Generic websocket error", err) log.Println("Generic websocket error", err)
return return
} }
MessageLoop: //Tagging this so we can break out of it with any errors we encounter that are failing
for { for {
runningTorrents := tclient.Torrents() //getting running torrents here since multiple cases ask for the running torrents runningTorrents := tclient.Torrents() //getting running torrents here since multiple cases ask for the running torrents
msg := Engine.Message{} msg := Engine.Message{}
readJSONError := conn.ReadJSON(&msg) readJSONError := conn.ReadJSON(&msg)
if readJSONError != nil { if readJSONError != nil {
fmt.Println("Unable to read JSON client message", err) fmt.Println("Unable to read JSON client message", err)
break MessageLoop
} }
fmt.Println("MessageFull", msg) fmt.Println("MessageFull", msg)
@@ -134,7 +150,7 @@ func main() {
case "torrentListRequest": case "torrentListRequest":
//fmt.Println("client Requested TorrentList Update") //fmt.Println("client Requested TorrentList Update")
TorrentLocalArray = Storage.ReadInTorrents(db) TorrentLocalArray = Storage.ReadInTorrents(db) //Required to re-read th database since we write to the DB and this will pull the changes from it
RunningTorrentArray = Engine.CreateRunningTorrentArray(tclient, TorrentLocalArray, PreviousTorrentArray, Config, db) //Updates the RunningTorrentArray with the current client data as well RunningTorrentArray = Engine.CreateRunningTorrentArray(tclient, TorrentLocalArray, PreviousTorrentArray, Config, db) //Updates the RunningTorrentArray with the current client data as well
PreviousTorrentArray = RunningTorrentArray PreviousTorrentArray = RunningTorrentArray
var torrentlistArray = new(Engine.TorrentList) var torrentlistArray = new(Engine.TorrentList)
@@ -144,43 +160,96 @@ func main() {
//fmt.Println("%+v\n", PreviousTorrentArray) //fmt.Println("%+v\n", PreviousTorrentArray)
//fmt.Printf("%+v\n", torrentlistArray) //fmt.Printf("%+v\n", torrentlistArray)
conn.WriteJSON(torrentlistArray) conn.WriteJSON(torrentlistArray)
break
//updateClient(RunningTorrentArray, conn) // sending the client update information over the websocket //updateClient(RunningTorrentArray, conn) // sending the client update information over the websocket
case "torrentFileListRequest": //client requested a filelist update case "torrentFileListRequest": //client requested a filelist update
fmt.Println("client Requested Filelist update") //fmt.Println("client Requested Filelist update")
FileListArray := Engine.CreateFileListArray(tclient, msg.Payload[0]) FileListArray := Engine.CreateFileListArray(tclient, msg.Payload[0])
conn.WriteJSON(FileListArray) //writing the JSON to the client conn.WriteJSON(FileListArray) //writing the JSON to the client
break
case "torrentDetailedInfo": //TODO Figure out how to get single torrent info correctly case "torrentDetailedInfo": //TODO Figure out how to get single torrent info correctly
fmt.Println("client requested detailed Torrent Info") fmt.Println("client requested detailed Torrent Info")
torrentDetailArray := Engine.CreateTorrentDetailJSON(tclient, msg.Payload[0], db) torrentDetailArray := Engine.CreateTorrentDetailJSON(tclient, msg.Payload[0], db)
conn.WriteJSON(torrentDetailArray) conn.WriteJSON(torrentDetailArray)
break
case "torrentPeerListRequest": case "torrentPeerListRequest":
fmt.Println("client requested peer list") fmt.Println("client requested peer list")
torrentPeerList := Engine.CreatePeerListArray(tclient, msg.Payload[0]) torrentPeerList := Engine.CreatePeerListArray(tclient, msg.Payload[0])
//fmt.Printf("%+v\n", torrentPeerList) //fmt.Printf("%+v\n", torrentPeerList)
//JSONTEST, _ := json.Marshal(torrentPeerList)
//fmt.Println(JSONTEST)
conn.WriteJSON(torrentPeerList) conn.WriteJSON(torrentPeerList)
break
case "rssFeedRequest":
fmt.Println("client requested RSS feed")
RSSList := Storage.FetchRSSFeeds(db)
RSSJSONFeed := Engine.RSSJSONList{MessageType: "rssListRequest", TotalRSSFeeds: len(RSSList.RSSFeeds)}
RSSsingleFeed := Engine.RSSFeedsNames{}
for _, singleFeed := range RSSList.RSSFeeds {
RSSsingleFeed.RSSName = singleFeed.Name
RSSsingleFeed.RSSFeedURL = singleFeed.URL
RSSJSONFeed.RSSFeeds = append(RSSJSONFeed.RSSFeeds, RSSsingleFeed)
}
conn.WriteJSON(RSSJSONFeed)
case "addRSSFeed":
fmt.Println("Adding RSSFeed", msg.Payload[0])
newRSSFeed := msg.Payload[0] //there will only be one RSS feed (hopefully)
fullRSSFeeds := Storage.FetchRSSFeeds(db)
fmt.Println("Pulled full RSS feeds from database: ", fullRSSFeeds)
for _, singleFeed := range fullRSSFeeds.RSSFeeds {
if newRSSFeed == singleFeed.URL || newRSSFeed == "" {
fmt.Println("Empty URL or Duplicate RSS URL to one already in database! Rejecting submission")
break MessageLoop
}
}
fp := gofeed.NewParser()
feed, err := fp.ParseURL(newRSSFeed)
if err != nil {
fmt.Println("Unable to parse the URL as valid RSS.. cannot add RSS...", newRSSFeed)
break MessageLoop
}
fmt.Println("Have feed from URL...", feed.Title)
newRSSFeedFull := Storage.SingleRSSFeed{}
newRSSFeedFull.Name = feed.Title
newRSSFeedFull.URL = msg.Payload[0]
fullRSSFeeds.RSSFeeds = append(fullRSSFeeds.RSSFeeds, newRSSFeedFull) // add the new RSS feed to the stack
Engine.ForceRSSRefresh(db, fullRSSFeeds)
//forcing an RSS refresh to fully populate all rss feeds TODO maybe just push the update of the new RSS feed and leave cron to update? But user would most likely expect an immediate update
case "deleteRSSFeed":
fmt.Println("Deleting RSS Feed", msg.Payload[0])
removingRSSFeed := msg.Payload[0]
Storage.DeleteRSSFeed(db, removingRSSFeed)
fullRSSFeeds := Storage.FetchRSSFeeds(db)
Engine.ForceRSSRefresh(db, fullRSSFeeds)
case "rssTorrentsRequest":
fmt.Println("Requesting Torrent List for feed", msg.Payload[0])
RSSFeedURL := msg.Payload[0]
fullRSSFeeds := Storage.FetchRSSFeeds(db)
for _, singleFeed := range fullRSSFeeds.RSSFeeds {
fmt.Println("URL", singleFeed.URL)
}
UpdatedRSSFeed := Engine.RefreshSingleRSSFeed(db, Storage.FetchSpecificRSSFeed(db, RSSFeedURL))
TorrentRSSList := SingleRSSFeedMessage{MessageType: "rssTorrentList", URL: RSSFeedURL, Name: UpdatedRSSFeed.Name, TotalTorrents: len(UpdatedRSSFeed.Torrents), Torrents: UpdatedRSSFeed.Torrents}
conn.WriteJSON(TorrentRSSList)
case "magnetLinkSubmit": //if we detect a magnet link we will be adding a magnet torrent case "magnetLinkSubmit": //if we detect a magnet link we will be adding a magnet torrent
clientTorrent, err := tclient.AddMagnet(msg.Payload[0]) //reading the payload into the torrent client
if err != nil {
fmt.Println("Magnet Error", err)
}
fmt.Println(clientTorrent)
fmt.Printf("Adding Magnet Link")
Engine.StartTorrent(clientTorrent, torrentLocalStorage, db, Config.DataDir, "magnet", "") //starting the torrent and creating local DB entry for _, magnetLink := range msg.Payload {
break clientTorrent, err := tclient.AddMagnet(magnetLink) //reading the payload into the torrent client
if err != nil {
fmt.Println("Magnet Error could not add torrent! ", err)
break MessageLoop //break out of the loop entirely for this message since we hit an error
}
fmt.Println(clientTorrent)
fmt.Printf("Adding Magnet Link")
Engine.StartTorrent(clientTorrent, torrentLocalStorage, db, Config.TorrentConfig.DataDir, "magnet", "") //starting the torrent and creating local DB entry
}
case "stopTorrents": case "stopTorrents":
TorrentListCommands := msg.Payload TorrentListCommands := msg.Payload
@@ -189,11 +258,15 @@ func main() {
for _, singleSelection := range TorrentListCommands {
if singleTorrent.InfoHash().String() == singleSelection {
fmt.Println("Matched for stopping torrents")
tempTorrentLocal := Storage.TorrentLocal{}
tempTorrentLocal.Hash = singleTorrent.InfoHash().String() //required since this is the ID that stormdb requires
tempTorrentLocal.TorrentStatus = "Stopped"
oldMax := singleTorrent.SetMaxEstablishedConns(0) //Forcing the max amount of connections allowed to zero effectively stopping it
fmt.Println("Setting max connections from ", oldMax, " to 0")
Storage.UpdateStorageTick(db, tempTorrentLocal) //Updating the torrent status
}
}
}
case "deleteTorrents":
for _, singleTorrent := range runningTorrents {
@@ -201,12 +274,10 @@ func main() {
for _, singleSelection := range msg.Payload {
if singleTorrent.InfoHash().String() == singleSelection {
fmt.Println("Matched for deleting torrents")
Storage.DelTorrentLocalStorage(db, singleTorrent.InfoHash().String())
}
}
}
case "startTorrents":
fmt.Println("Starting torrents", msg.Payload)
@@ -215,11 +286,76 @@ func main() {
for _, singleSelection := range msg.Payload {
if singleTorrent.InfoHash().String() == singleSelection {
fmt.Println("Matched for starting torrents", singleSelection)
tempTorrentLocal := Storage.TorrentLocal{}
tempTorrentLocal.Hash = singleTorrent.InfoHash().String() //required since this is the ID that stormdb requires
tempTorrentLocal.TorrentStatus = "Running" //Setting the status back to running
oldTorrentInfo := Storage.FetchTorrentFromStorage(db, singleTorrent.InfoHash().String()) //Fetching the old max connections setting from the database
if oldTorrentInfo.MaxConnections == 0 { //if somehow the old max was set at zero change it to 80
oldTorrentInfo.MaxConnections = 80
Storage.UpdateStorageTick(db, oldTorrentInfo)
}
oldMax := singleTorrent.SetMaxEstablishedConns(oldTorrentInfo.MaxConnections) //restoring the stored max connections so the torrent starts moving again
fmt.Println("Setting max connections from 0 to:", oldMax)
Storage.UpdateStorageTick(db, tempTorrentLocal) //Updating the torrent status
}
}
}
case "setFilePriority": //TODO have one priority second message determines what priority
fmt.Println("Setting file priority", msg.Payload)
priorityRequested := msg.Payload[1] //storing the priority requested
infoHash := msg.Payload[0] //storing our infohash
fileList := append(msg.Payload[:0], msg.Payload[2:]...) //removing the filehash and priority from the array leaving just the filepath
fmt.Println("fileList after stripping out", fileList)
for _, singleTorrent := range runningTorrents {
if singleTorrent.InfoHash().String() == infoHash {
fmt.Println("Matched for changing file prio torrents", singleTorrent)
for _, file := range singleTorrent.Files() {
for _, sentFile := range fileList {
if file.Path() == sentFile {
if priorityRequested == "High" {
fileRead := singleTorrent.NewReader()
fileRead.Seek(file.Offset(), 0)
fileRead.SetReadahead(file.Length())
fmt.Println("Setting priority for HIGH", file.DisplayPath())
activeTorrentStruct := Storage.FetchTorrentFromStorage(db, infoHash) //fetching all the data from the db to update certain fields then write it all back
for i, specificFile := range activeTorrentStruct.TorrentFilePriority { //searching for that specific file
if specificFile.TorrentFilePath == file.DisplayPath() {
activeTorrentStruct.TorrentFilePriority[i].TorrentFilePriority = "High" //writing just that field to the current struct
}
}
Storage.UpdateStorageTick(db, activeTorrentStruct) //rewritting essentially that entire struct right back into the database
}
if priorityRequested == "Normal" {
file.Download()
fmt.Println("Setting priority for Normal", file.DisplayPath())
activeTorrentStruct := Storage.FetchTorrentFromStorage(db, infoHash) //fetching all the data from the db to update certain fields then write it all back
for i, specificFile := range activeTorrentStruct.TorrentFilePriority { //searching for that specific file
if specificFile.TorrentFilePath == file.DisplayPath() {
activeTorrentStruct.TorrentFilePriority[i].TorrentFilePriority = "Normal" //writing just that field to the current struct
}
}
Storage.UpdateStorageTick(db, activeTorrentStruct) //rewritting essentially that entire struct right back into the database
}
if priorityRequested == "Cancel" {
file.Cancel()
fmt.Println("Canceling File", file.DisplayPath())
activeTorrentStruct := Storage.FetchTorrentFromStorage(db, infoHash) //fetching all the data from the db to update certain fields then write it all back
for i, specificFile := range activeTorrentStruct.TorrentFilePriority { //searching for that specific file
if specificFile.TorrentFilePath == file.DisplayPath() {
activeTorrentStruct.TorrentFilePriority[i].TorrentFilePriority = "Canceled" //writing just that field to the current struct
}
}
Storage.UpdateStorageTick(db, activeTorrentStruct) //rewritting essentially that entire struct right back into the database
}
}
}
} }
} }
} }
default:
//conn.Close()
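A sketch (not part of the commit) of the client-to-server messages this switch handles. The "MessageType" and "Payload" field names are an assumption inferred from how msg is used above (the switch over its message type and msg.Payload as a slice of strings); the info hash and file path are invented:

package main

import (
	"encoding/json"
	"fmt"
)

//exampleClientMessages marshals two example websocket messages in the shape the server appears to expect.
func exampleClientMessages() {
	stop, _ := json.Marshal(map[string]interface{}{
		"MessageType": "stopTorrents",
		"Payload":     []string{"d2474e86c95b19b8bcfdb92bc12c9d44667cfa36"}, //invented info hash
	})
	setPriority, _ := json.Marshal(map[string]interface{}{
		"MessageType": "setFilePriority",
		"Payload":     []string{"d2474e86c95b19b8bcfdb92bc12c9d44667cfa36", "High", "Some.Show/Episode.01.mkv"}, //hash, priority, then file paths
	})
	fmt.Println(string(stop))
	fmt.Println(string(setPriority))
}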

File diff suppressed because one or more lines are too long

View File

@@ -1,30 +0,0 @@
package engine
import (
"github.com/anacrolix/dht"
"github.com/anacrolix/torrent"
)
type FullClientSettings struct {
Version int
torrent.Config
TFileUploadFolder string
}
func FullClientSettingsNew() FullClientSettings {
//Config := fullClientSettings //generate a new struct
var Config FullClientSettings
Config.Version = 1.0
Config.DataDir = "downloads" //the full OR relative path of the default download directory for torrents
Config.TFileUploadFolder = "uploadedTorrents"
Config.Seed = true
Config.DHTConfig = dht.ServerConfig{
StartingNodes: dht.GlobalBootstrapAddrs,
}
return Config
}

Binary file not shown.

BIN
storage.db.old Normal file

Binary file not shown.

View File

@@ -6,30 +6,56 @@ import (
"github.com/asdine/storm" "github.com/asdine/storm"
) )
//RSSFeedStore stores all of our RSS feeds in a slice of gofeed.Feed
type RSSFeedStore struct {
ID int `storm:"id,unique"` //storm requires unique ID to save although there will only be one of these
RSSFeeds []SingleRSSFeed //slice of string containing URL's in string form for gofeed to parse
}
//SingleRSSFeed stores an RSS feed with a list of all the torrents in the feed
type SingleRSSFeed struct {
URL string `storm:"id,unique"` //the URL of the individual RSS feed
Name string
Torrents []SingleRSSTorrent //name of the torrents
}
//SingleRSSTorrent stores a single RSS torrent with all the relevant information
type SingleRSSTorrent struct {
Link string `storm:"id,unique"`
Title string
PubDate string //TODO, change this to a date of some kind
}
//TorrentFilePriority stores the priority for each file in a torrent
type TorrentFilePriority struct {
TorrentFilePath string
TorrentFilePriority string
}
//TorrentLocal is local storage of the torrents for readd on server restart, marshalled into the database using Storm
type TorrentLocal struct {
Hash string `storm:"id,unique"` //Hash should be unique for every torrent... if not we are re-adding an already added torrent \\TODO check for re-add of same torrent
InfoBytes []byte
DateAdded string
StoragePath string
TorrentName string
TorrentStatus string
MaxConnections int
TorrentType string //magnet or .torrent file
TorrentFileName string
TorrentFile []byte //TODO store and retrieve torrent file from here
Label string //for labeling torrent files
UploadedBytes int64
DownloadedBytes int64 //TODO not sure if needed since we should have the file which contains the bytes
UploadRatio string
TorrentFilePriority []TorrentFilePriority
}
//ReadInTorrents is called to read in ALL local stored torrents in the boltdb database (called on server restart)
func ReadInTorrents(torrentStorage *storm.DB) (torrentLocalArray []*TorrentLocal) {
err := torrentStorage.Init(&TorrentLocal{}) //initializing buckets and indexes since this function runs on start
if err != nil {
fmt.Println("Error initializing and indexing database....", err)
}
torrentLocalArray = []*TorrentLocal{} //creating the array of the torrentlocal struct
err = torrentStorage.All(&torrentLocalArray) //unmarshalling the database into the []torrentlocal
if err != nil {
fmt.Println("Error reading database into torrentLocalArray", err)
}
@@ -48,11 +74,15 @@ func AddTorrentLocalStorage(torrentStorage *storm.DB, local TorrentLocal) {
}
//DelTorrentLocalStorage is called to delete a torrent when we fail (for whatever reason to load the information for it). Deleted by HASH matching.
func DelTorrentLocalStorage(torrentStorage *storm.DB, selectedHash string) {
singleTorrentInfo := TorrentLocal{}
err := torrentStorage.One("Hash", selectedHash, &singleTorrentInfo) //finding the torrent by the hash passed in and storing it in a struct
if err != nil {
fmt.Println("Error finding torrent with hash ", selectedHash, " to delete", err)
}
err = torrentStorage.DeleteStruct(&singleTorrentInfo) //deleting that struct from the database
if err != nil {
fmt.Println("Error deleting torrent ", singleTorrentInfo, " from database", err)
} }
}
@@ -66,7 +96,6 @@ func UpdateStorageTick(torrentStorage *storm.DB, torrentLocal TorrentLocal) {
//FetchTorrentFromStorage grabs the localtorrent info from the bolt database for usage found by torrenthash
func FetchTorrentFromStorage(torrentStorage *storm.DB, selectedHash string) TorrentLocal {
singleTorrentInfo := TorrentLocal{}
err := torrentStorage.One("Hash", selectedHash, &singleTorrentInfo)
if err != nil {
@@ -75,3 +104,57 @@ func FetchTorrentFromStorage(torrentStorage *storm.DB, selectedHash string) Torr
return singleTorrentInfo
}
//FetchRSSFeeds fetches the RSS feed from db, which was setup when initializing database on first startup
func FetchRSSFeeds(db *storm.DB) RSSFeedStore {
RSSFeed := RSSFeedStore{}
err := db.One("ID", 1, &RSSFeed) //The ID of 1 should be unique since we will only have one entry
if err != nil { //If we fail to find it in the DB, create it, will happen at first run
fmt.Println("Failure retrieving RSS feeds, creating bucket for RSS feeds, expected behaviour if first run for RSS", err)
RSSFeed := RSSFeedStore{}
RSSFeed.ID = 1
err = db.Save(&RSSFeed)
if err != nil {
fmt.Println("Fatal error trying to create RSSFeedStore in database")
}
return RSSFeed
}
return RSSFeed
}
//FetchSpecificRSSFeed pulls one feed from the database to send to the client
func FetchSpecificRSSFeed(db *storm.DB, RSSFeedURL string) SingleRSSFeed {
allRSSFeeds := FetchRSSFeeds(db)
singleRSSFeedRet := SingleRSSFeed{}
for _, singleRSSFeed := range allRSSFeeds.RSSFeeds {
if singleRSSFeed.URL == RSSFeedURL {
singleRSSFeedRet.Name = singleRSSFeed.Name
singleRSSFeedRet.URL = singleRSSFeed.URL
singleRSSFeedRet.Torrents = singleRSSFeed.Torrents
}
}
return singleRSSFeedRet
}
//UpdateRSSFeeds updates the RSS feeds every time they are changed
func UpdateRSSFeeds(db *storm.DB, RSSFeed RSSFeedStore) {
err := db.Update(&RSSFeed)
if err != nil {
fmt.Println("Error performing RSS Update", err)
}
}
//DeleteRSSFeed fetches the current feed store and rewrites it without the deleted RSS feed
func DeleteRSSFeed(db *storm.DB, RSSFeedURL string) {
RSSFeedStoreOld := FetchRSSFeeds(db) //Fetching current store to update
newRSSFeedStore := RSSFeedStore{ID: RSSFeedStoreOld.ID} //creating new store
for _, RSSFeed := range RSSFeedStoreOld.RSSFeeds { //recreating entire store and excluding that one RSS feed we don't want
if RSSFeed.URL != RSSFeedURL {
newRSSFeedStore.RSSFeeds = append(newRSSFeedStore.RSSFeeds, RSSFeed)
}
}
err := db.Update(&newRSSFeedStore)
if err != nil {
fmt.Println("Error deleting RSS feed from db", err)
}
}
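//Complementary write-path sketch (hypothetical helper, not part of this commit): append a new feed to the fetched store and persist it with UpdateRSSFeeds; assumes RSSFeedStore.RSSFeeds is a []SingleRSSFeed, as the loops above suggest.
func exampleAddRSSFeed(db *storm.DB, feedName string, feedURL string) {
feedStore := FetchRSSFeeds(db) //grab the current store (created automatically on first run)
feedStore.RSSFeeds = append(feedStore.RSSFeeds, SingleRSSFeed{Name: feedName, URL: feedURL})
UpdateRSSFeeds(db, feedStore) //persist the updated store
}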

View File

@@ -14,6 +14,7 @@ let torrents= [];
let peerList = [];
let fileList = [];
let RSSList = [];
let RSSTorrentList = [];
var torrentListRequest = {
messageType: "torrentListRequest"
@@ -100,14 +101,25 @@ ws.onmessage = function (evt) { //When we recieve a message from the websocket
RSSList = [];
for (var i = 0; i < serverMessage.TotalRSSFeeds; i++){
RSSList.push({
RSSURL: serverMessage.RSSFeeds[i].RSSFeedURL,
RSSName: serverMessage.RSSFeeds[i].RSSName,
})
}
console.log("RSSURLS", RSSList)
console.log("FIRSTURL", RSSList[1])
console.log("FULLURL", RSSList[1].RSSURL)
break;
case "rssTorrentList":
//console.log("RSSTorrentList recieved", evt.data)
RSSTorrentList = [];
for (var i = 0; i < serverMessage.TotalTorrents; i++){
RSSTorrentList.push({
TorrentName: serverMessage.Torrents[i].Title,
TorrentLink: serverMessage.Torrents[i].Link,
PublishDate: serverMessage.Torrents[i].PubDate,
})
}
}
}
@@ -175,15 +187,23 @@ class BackendSocket extends React.Component {
this.timerID = setInterval(
() => this.tick(),
2000
);
}
componentWillUnmount() {
clearInterval(this.timerID);
}
tick() { // this tick is the main tick that updates ALL of the components that update on tick... which is a lot
if (this.props.RSSList != RSSList && this.props.RSSModalOpen == true) {
this.props.newRSSFeedStore(RSSList) //pushing the new RSSList to Redux
}
if (this.props.RSSTorrentList != RSSTorrentList && this.props.RSSModalOpen == true){
this.props.newRSSTorrentList(RSSTorrentList) //pushing the new RSSTorrentList to Redux
}
ws.send(JSON.stringify(torrentListRequest))//talking to the server to get the torrent list
//console.log("Torrentlist", torrents)
this.props.setButtonState(this.props.selection) //forcing an update to the buttons
@@ -218,7 +238,6 @@ class BackendSocket extends React.Component {
if (nextProps.selectionHashes.length === 1){ //if we have a selection pass it on for the tabs to verify
this.selectionHandler(nextProps.selectionHashes, nextProps.selectedTab)
}
@@ -241,6 +260,8 @@ const mapStateToProps = state => {
selectionHashes: state.selectionHashes,
selectedTab: state.selectedTab,
selection: state.selection,
RSSModalOpen: state.RSSModalOpen,
RSSTorrentList: state.RSSTorrentList,
};
}
@@ -254,6 +275,7 @@ const mapDispatchToProps = dispatch => {
newFileList: (fileList) => dispatch({type: actionTypes.FILE_LIST, fileList}),
setButtonState: (buttonState) => dispatch({type: actionTypes.SET_BUTTON_STATE, buttonState}),
newRSSFeedStore: (RSSList) => dispatch({type: actionTypes.NEW_RSS_FEED_STORE, RSSList}),
newRSSTorrentList: (RSSTorrentList) => dispatch({type: actionTypes.RSS_TORRENT_LIST, RSSTorrentList}), //renamed so it does not collide with the RSSTorrentList state prop above
//changeSelection: (selection) => dispatch({type: actionTypes.CHANGE_SELECTION, selection}),//forcing an update to the buttons
}

View File

@@ -56,13 +56,16 @@ const inlineStyle = {
open: false,
};
componentDidMount () {
let RSSRequest = {
messageType: "rssFeedRequest",
}
ws.send(JSON.stringify(RSSRequest)) //Immediately request an update of the feed list when the component mounts
}
handleClickOpen = () => {
this.setState({ open: true });
}
@@ -104,7 +107,7 @@ const inlineStyle = {
<ReactTooltip place="top" type="light" effect="float" /> <ReactTooltip place="top" type="light" effect="float" />
<RSSTorrentIcon /> <RSSTorrentIcon />
</IconButton> </IconButton>
<Dialog fullWidth open={this.state.open} onRequestClose={this.handleRequestClose}> <Dialog fullScreen open={this.state.open} onRequestClose={this.handleRequestClose}>
<DialogTitle>Manage RSS Feeds</DialogTitle> <DialogTitle>Manage RSS Feeds</DialogTitle>
<DialogContent> <DialogContent>
<TextField <TextField
@@ -115,13 +118,13 @@ const inlineStyle = {
label="Add New RSS URL" label="Add New RSS URL"
type="text" type="text"
placeholder="Enter RSS URL Here.." placeholder="Enter RSS URL Here.."
//onChange={this.setTextValue} onChange={this.setTextValue}
/> />
<IconButton onClick={this.handleAddRSSFeed} color="primary" data-tip="Manage RSS Feeds" style={smallButton} aria-label="Add RSS Feeds"> <IconButton onClick={this.handleAddRSSFeed} color="primary" data-tip="Manage RSS Feeds" style={smallButton} aria-label="Add RSS Feeds">
<ReactTooltip place="top" type="light" effect="float" /> <ReactTooltip place="top" type="light" effect="float" />
<AddRSSIcon /> <AddRSSIcon />
</IconButton> </IconButton>
{this.state.open === true && {this.state.open === true && //if the modal is open mount the list of torrents
<RSSModalList /> <RSSModalList />
} }
</DialogContent> </DialogContent>

View File

@@ -22,7 +22,8 @@ import Icon from 'material-ui/Icon';
import IconButton from 'material-ui/IconButton';
import RSSTorrentIcon from 'material-ui-icons/RssFeed';
import AddRSSIcon from 'material-ui-icons/AddCircle';
import DeleteIcon from 'material-ui-icons/Delete';
import RSSTorrentList from './addRSSTorrentList';
//Redux
import {connect} from 'react-redux';
@@ -54,34 +55,77 @@ const inlineStyle = {
state = {
testRSSFeeds: [],
showList: false,
selectedIndex: 0,
}; };
componentDidMount () {
console.log("SECONDARY MOUNT", this.props.RSSList)
this.props.RSSModalOpen(true)
}
componentWillUnmount () {
this.props.RSSModalOpen(false)
}
showRSSFiles = (key) => {
let RSSTorrentsRequest = {
messageType: "rssTorrentsRequest",
Payload: [this.props.RSSList[key].RSSURL]
}
ws.send(JSON.stringify(RSSTorrentsRequest))
this.setState({selectedIndex: key}) //setting our selected index for styling
console.log("RSSFEED", key, "sending message", JSON.stringify(RSSTorrentsRequest))
}
getStyle = (index) => { //returns the inline style for a feed row in the list
console.log("SettingStyle", this.state.selectedIndex, index)
if (this.state.selectedIndex == index){
console.log("Returning activestyle")
return {backgroundColor: '#80b3ff'} //highlight the currently selected feed
}
return {backgroundColor: '#f44295'}
}
deleteRSSFeed = (key) => {
let RSSURLDelete = {
messageType: "deleteRSSFeed",
Payload: [this.props.RSSList[key]]
}
console.log("Deleting THIS", this.props.RSSList[key])
//ws.send(JSON.stringify(RSSURLDelete));
}
render() {
//const { classes, onRequestClose, handleRequestClose, handleSubmit } = this.props;
if (this.props.RSSList.length > 0 && this.state.showList == false){
console.log("Setting list to show....")
this.setState({showList: true})
}
return (
<div style={inlineStyle}>
{this.state.showList == true && //if we have any RSS feeds then display them in a list
<List dense>
{this.props.RSSList.map((RSSFeed, index) => {
return (
<ListItem button={true} onClick={() => this.showRSSFiles(index)} key={index}>
<ListItemText primary={RSSFeed.RSSName} />
<ListItemSecondaryAction>
<IconButton key={index} onClick={() => this.deleteRSSFeed(index)} aria-label="Delete">
<DeleteIcon />
</IconButton>
</ListItemSecondaryAction>
</ListItem>
)})}
</List>
}
<RSSTorrentList />
</div> </div>
); );
} }
@@ -94,5 +138,9 @@ const mapStateToProps = state => {
};
}
const mapDispatchToProps = dispatch => {
return {
RSSModalOpen: (RSSModalOpen) => dispatch({type: actionTypes.RSS_MODAL_OPEN, RSSModalOpen}), //sending modal state to backendwebsocket so we can update RSS lists
}
}
export default connect(mapStateToProps, mapDispatchToProps)(RSSModalList)

View File

@@ -0,0 +1,126 @@
import React from 'react';
import ReactDOM from 'react-dom';
import Button from 'material-ui/Button';
import {
SortingState, LocalSorting, VirtualTableLayout, SelectionState,
} from '@devexpress/dx-react-grid';
import {
Grid, TableHeaderRow, PagingPanel, VirtualTableView, TableColumnResizing,
DragDropContext, TableColumnReordering, TableSelection,
} from '@devexpress/dx-react-grid-material-ui';
import {connect} from 'react-redux';
import * as actionTypes from '../../store/actions';
const tableStyle = {
}
class RSSTorrentList extends React.Component {
constructor(props) {
super(props);
this.state = { //rows are stored in redux they are sent over from the server
columns: [
{ name: 'TorrentName', title: 'Title'},
{ name: 'TorrentLink', title: 'Magnet Link' },
{ name: 'PublishDate', title: 'Date Published'},
],
sorting: [],
columnOrder: ['TorrentName', 'TorrentLink', 'PublishDate'],
columnWidths: {TorrentName: 450, TorrentLink: 650, PublishDate: 200},
fileSelection: [],
selected: [],
};
this.changeColumnOrder = columnOrder => this.setState({columnOrder});
this.changeColumnWidths = columnWidths => this.setState({columnWidths});
this.changeSorting = sorting => this.setState({sorting});
}
changeSelection = (selection) => {
console.log("TorrentList is changing selection now", selection)
this.setState({selected: selection})
if (selection.length > 0) { //if selection is empty buttons will be default and selectionHashes will be blanked out and pushed to redux
console.log("Getting the selected Rows")
const selectedRows = [] //array of all the selected Rows
selection.forEach(element => {
selectedRows.push(this.props.RSSTorrentList[element]) //pushing the selected rows out of torrentlist
});
this.setState({fileSelection: selectedRows})
}
}
sendMagnetLinks = () => { //TODO: currently re-uses the setFilePriority message copied from the file list; RSS rows expose TorrentLink rather than FilePath
let sendFileNames = [] //array of the selected entries to send to the server
this.state.fileSelection.forEach(element => {
console.log("element", element)
sendFileNames.push(element.FilePath)
})
let setFilePriority = {
messageType: "setFilePriority",
Payload: sendFileNames,
}
console.log(JSON.stringify(setFilePriority))
ws.send(JSON.stringify(setFilePriority))
}
componentWillReceiveProps (nextProps) {
console.log("New torrentlist", nextProps.RSSTorrentList)
}
render() {
return (
//Buttons here
<div>
<Button raised color="primary" onClick={this.setHighPriority}>
Download Torrents
</Button>
<Grid rows={this.props.RSSTorrentList} columns={this.state.columns}>
<SortingState sorting={this.state.sorting} onSortingChange={this.changeSorting} />
<LocalSorting />
<DragDropContext />
<SelectionState onSelectionChange={this.changeSelection} selection={this.state.selection}/>
<VirtualTableView height={300} />
<TableColumnResizing columnWidths={this.state.columnWidths} onColumnWidthsChange={this.changeColumnWidths}/>
<TableColumnReordering order={this.state.columnOrder} onOrderChange={this.changeColumnOrder} />
<TableSelection selectByRowClick highlightSelected />
<TableHeaderRow allowSorting allowResizing allowDragging />
</Grid>
</div>
);
}
}
const mapStateToProps = state => {
return {
selectionHashes: state.selectionHashes,
RSSTorrentList: state.RSSTorrentList,
//fileSelectionNames: state.fileSelectionNames,
};
}
const mapDispatchToProps = dispatch => {
return {
//changeFileSelection: (fileSelection) => dispatch({type: actionTypes.CHANGE_FILE_SELECTION, fileSelection}),
//sendSelectionHashes: (selectionHashes) => dispatch({type: actionTypes.SELECTION_HASHES, selectionHashes}),
}
}
export default connect(mapStateToProps, mapDispatchToProps)(RSSTorrentList)

View File

@@ -8,4 +8,6 @@ export const SELECTED_TAB = 'SELECTED_TAB';
export const PEER_LIST = 'PEER_LIST';
export const FILE_LIST = 'FILE_LIST';
export const CHANGE_FILE_SELECTION = 'CHANGE_FILE_SELECTION';
export const NEW_RSS_FEED_STORE = 'NEW_RSS_FEED_STORE';
export const RSS_MODAL_OPEN = 'RSS_MODAL_OPEN';
export const RSS_TORRENT_LIST = 'RSS_TORRENT_LIST';

View File

@@ -16,6 +16,8 @@ const initialState = {
torrentDetailInfo: [],
selectedTab: 0,
RSSList: [],
RSSTorrentList: [],
RSSModalOpen: false,
} }
@@ -40,6 +42,13 @@ const reducer = (state = initialState, action) => {
...state,
RSSList: action.RSSList,
}
case actionTypes.RSS_TORRENT_LIST:
console.log("New RSS Torrent List IN REDUCER", action.RSSTorrentList)
return {
...state,
RSSTorrentList: action.RSSTorrentList,
}
case actionTypes.SELECTION_HASHES:
console.log("Selection hashes REDUX", action.selectionHashes)
@@ -81,6 +90,12 @@ const reducer = (state = initialState, action) => {
...state,
selectedTab: action.selectedTab
}
case actionTypes.RSS_MODAL_OPEN:
return {
...state,
RSSModalOpen: action.RSSModalOpen
}
case actionTypes.SET_BUTTON_STATE:
if (action.buttonState.length === 0) { //if selection is empty buttons will be default and selectionHashes will be blanked out and pushed to redux